Test Report: Docker_Linux_containerd_arm64 19651

                    
                      f000a69778791892f7d89fef6358d7150d12a198:2024-09-16:36236
                    
                

Test fail (36/229)

Order failed test Duration
29 TestAddons/serial/Volcano 294.57
31 TestAddons/serial/GCPAuth/Namespaces 0
33 TestAddons/parallel/Registry 15.67
34 TestAddons/parallel/Ingress 2.38
36 TestAddons/parallel/MetricsServer 369.13
39 TestAddons/parallel/CSI 362.5
42 TestAddons/parallel/LocalPath 0
46 TestCertOptions 41.75
68 TestFunctional/serial/KubeContext 2.2
69 TestFunctional/serial/KubectlGetPods 2.25
82 TestFunctional/serial/ComponentHealth 2.87
85 TestFunctional/serial/InvalidService 0
88 TestFunctional/parallel/DashboardCmd 7.19
95 TestFunctional/parallel/ServiceCmdConnect 2.55
97 TestFunctional/parallel/PersistentVolumeClaim 101.82
107 TestFunctional/parallel/NodeLabels 4.37
116 TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup 0
117 TestFunctional/parallel/TunnelCmd/serial/AccessDirect 107.16
118 TestFunctional/parallel/ServiceCmd/DeployApp 0
119 TestFunctional/parallel/ServiceCmd/List 0.32
120 TestFunctional/parallel/ServiceCmd/JSONOutput 0.34
121 TestFunctional/parallel/ServiceCmd/HTTPS 0.34
122 TestFunctional/parallel/ServiceCmd/Format 0.35
123 TestFunctional/parallel/ServiceCmd/URL 0.34
131 TestFunctional/parallel/MountCmd/any-port 2.71
162 TestMultiControlPlane/serial/NodeLabels 3.05
167 TestMultiControlPlane/serial/RestartSecondaryNode 21.47
170 TestMultiControlPlane/serial/DeleteSecondaryNode 13.23
173 TestMultiControlPlane/serial/RestartCluster 81.26
229 TestMultiNode/serial/MultiNodeLabels 2.63
233 TestMultiNode/serial/StartAfterStop 12.03
235 TestMultiNode/serial/DeleteNode 9.28
237 TestMultiNode/serial/RestartMultiNode 51.6
243 TestPreload 18.42
251 TestKubernetesUpgrade 346.35
361 TestNetworkPlugins/group/custom-flannel/NetCatPod 7200.07
x
+
TestAddons/serial/Volcano (294.57s)

                                                
                                                
=== RUN   TestAddons/serial/Volcano
addons_test.go:897: volcano-scheduler stabilized in 65.819178ms
addons_test.go:913: volcano-controller stabilized in 65.900121ms
addons_test.go:905: volcano-admission stabilized in 65.972358ms
addons_test.go:919: (dbg) TestAddons/serial/Volcano: waiting 6m0s for pods matching "app=volcano-scheduler" in namespace "volcano-system" ...
helpers_test.go:344: "volcano-scheduler-576bc46687-xwjbn" [de3bf2b7-3b18-4d0c-b853-d66676efb044] Running
addons_test.go:919: (dbg) TestAddons/serial/Volcano: app=volcano-scheduler healthy within 5.010366427s
addons_test.go:923: (dbg) TestAddons/serial/Volcano: waiting 6m0s for pods matching "app=volcano-admission" in namespace "volcano-system" ...
helpers_test.go:344: "volcano-admission-77d7d48b68-sjxcs" [2e414e00-44d5-4222-ab92-c70fd3144478] Running
addons_test.go:923: (dbg) TestAddons/serial/Volcano: app=volcano-admission healthy within 5.003962692s
addons_test.go:927: (dbg) TestAddons/serial/Volcano: waiting 6m0s for pods matching "app=volcano-controller" in namespace "volcano-system" ...
helpers_test.go:344: "volcano-controllers-56675bb4d5-2ltwp" [fdd6c718-1ed7-4213-a6f3-500e22f1f4d3] Running
addons_test.go:927: (dbg) TestAddons/serial/Volcano: app=volcano-controller healthy within 5.004408854s
addons_test.go:932: (dbg) Run:  kubectl --context addons-451841 delete -n volcano-system job volcano-admission-init
addons_test.go:932: (dbg) Non-zero exit: kubectl --context addons-451841 delete -n volcano-system job volcano-admission-init: fork/exec /usr/local/bin/kubectl: exec format error (523.205µs)
addons_test.go:934: vcjob creation with kubectl --context addons-451841 delete -n volcano-system job volcano-admission-init failed: fork/exec /usr/local/bin/kubectl: exec format error
addons_test.go:938: (dbg) Run:  kubectl --context addons-451841 create -f testdata/vcjob.yaml
addons_test.go:938: (dbg) Non-zero exit: kubectl --context addons-451841 create -f testdata/vcjob.yaml: fork/exec /usr/local/bin/kubectl: exec format error (207.613µs)
addons_test.go:940: vcjob creation with kubectl --context addons-451841 create -f testdata/vcjob.yaml failed: fork/exec /usr/local/bin/kubectl: exec format error
addons_test.go:946: (dbg) Run:  kubectl --context addons-451841 get vcjob -n my-volcano
addons_test.go:946: (dbg) Non-zero exit: kubectl --context addons-451841 get vcjob -n my-volcano: fork/exec /usr/local/bin/kubectl: exec format error (376.712µs)
addons_test.go:946: (dbg) Run:  kubectl --context addons-451841 get vcjob -n my-volcano
addons_test.go:946: (dbg) Non-zero exit: kubectl --context addons-451841 get vcjob -n my-volcano: fork/exec /usr/local/bin/kubectl: exec format error (427.14µs)
addons_test.go:946: (dbg) Run:  kubectl --context addons-451841 get vcjob -n my-volcano
addons_test.go:946: (dbg) Non-zero exit: kubectl --context addons-451841 get vcjob -n my-volcano: fork/exec /usr/local/bin/kubectl: exec format error (3.83569ms)
addons_test.go:946: (dbg) Run:  kubectl --context addons-451841 get vcjob -n my-volcano
addons_test.go:946: (dbg) Non-zero exit: kubectl --context addons-451841 get vcjob -n my-volcano: fork/exec /usr/local/bin/kubectl: exec format error (320.515µs)
addons_test.go:946: (dbg) Run:  kubectl --context addons-451841 get vcjob -n my-volcano
addons_test.go:946: (dbg) Non-zero exit: kubectl --context addons-451841 get vcjob -n my-volcano: fork/exec /usr/local/bin/kubectl: exec format error (499.91µs)
addons_test.go:946: (dbg) Run:  kubectl --context addons-451841 get vcjob -n my-volcano
addons_test.go:946: (dbg) Non-zero exit: kubectl --context addons-451841 get vcjob -n my-volcano: fork/exec /usr/local/bin/kubectl: exec format error (528.431µs)
addons_test.go:946: (dbg) Run:  kubectl --context addons-451841 get vcjob -n my-volcano
addons_test.go:946: (dbg) Non-zero exit: kubectl --context addons-451841 get vcjob -n my-volcano: fork/exec /usr/local/bin/kubectl: exec format error (423.226µs)
addons_test.go:946: (dbg) Run:  kubectl --context addons-451841 get vcjob -n my-volcano
addons_test.go:946: (dbg) Non-zero exit: kubectl --context addons-451841 get vcjob -n my-volcano: fork/exec /usr/local/bin/kubectl: exec format error (346.599µs)
addons_test.go:960: failed checking volcano: fork/exec /usr/local/bin/kubectl: exec format error
addons_test.go:964: (dbg) TestAddons/serial/Volcano: waiting 3m0s for pods matching "volcano.sh/job-name=test-job" in namespace "my-volcano" ...
helpers_test.go:329: TestAddons/serial/Volcano: WARNING: pod list for "my-volcano" "volcano.sh/job-name=test-job" returned: client rate limiter Wait returned an error: rate: Wait(n=1) would exceed context deadline
addons_test.go:964: ***** TestAddons/serial/Volcano: pod "volcano.sh/job-name=test-job" failed to start within 3m0s: context deadline exceeded ****
addons_test.go:964: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-451841 -n addons-451841
addons_test.go:964: TestAddons/serial/Volcano: showing logs for failed pods as of 2024-09-16 10:38:26.03488427 +0000 UTC m=+577.676284427
addons_test.go:965: failed waiting for test-local-path pod: volcano.sh/job-name=test-job within 3m0s: context deadline exceeded
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestAddons/serial/Volcano]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect addons-451841
helpers_test.go:235: (dbg) docker inspect addons-451841:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4",
	        "Created": "2024-09-16T10:30:19.386072283Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2064804,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:30:19.514500967Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/hostname",
	        "HostsPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/hosts",
	        "LogPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4-json.log",
	        "Name": "/addons-451841",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "addons-451841:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "addons-451841",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/merged",
	                "UpperDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/diff",
	                "WorkDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "addons-451841",
	                "Source": "/var/lib/docker/volumes/addons-451841/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "addons-451841",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "addons-451841",
	                "name.minikube.sigs.k8s.io": "addons-451841",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "4da7b9dd4db914ae48304dba9ae2b2fb9dab68040bc986bf2751a778e62e4524",
	            "SandboxKey": "/var/run/docker/netns/4da7b9dd4db9",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40577"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40578"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40581"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40579"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40580"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "addons-451841": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "cd1315f485e3267c82ac80908081e901323e720ef1bb26de92d612c54dfd58d8",
	                    "EndpointID": "36f212e2a713c67d6c2ea54e50fbd0d8d7f7eb862ef913caa03a6cbfac71cb21",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "addons-451841",
	                        "8a213d4c4dec"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p addons-451841 -n addons-451841
helpers_test.go:244: <<< TestAddons/serial/Volcano FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestAddons/serial/Volcano]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p addons-451841 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p addons-451841 logs -n 25: (1.619425289s)
helpers_test.go:252: TestAddons/serial/Volcano logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| Command |                 Args                 |        Profile         |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:28 UTC |                     |
	|         | -p download-only-911311              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0         |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:28 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-911311              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | -o=json --download-only              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | -p download-only-889126              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.31.1         |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-889126              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-911311              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-889126              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | --download-only -p                   | download-docker-956530 | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | download-docker-956530               |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | -p download-docker-956530            | download-docker-956530 | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | --download-only -p                   | binary-mirror-852743   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | binary-mirror-852743                 |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --binary-mirror                      |                        |         |         |                     |                     |
	|         | http://127.0.0.1:35351               |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | -p binary-mirror-852743              | binary-mirror-852743   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| addons  | disable dashboard -p                 | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| addons  | enable dashboard -p                  | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| start   | -p addons-451841 --wait=true         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:33 UTC |
	|         | --memory=4000 --alsologtostderr      |                        |         |         |                     |                     |
	|         | --addons=registry                    |                        |         |         |                     |                     |
	|         | --addons=metrics-server              |                        |         |         |                     |                     |
	|         | --addons=volumesnapshots             |                        |         |         |                     |                     |
	|         | --addons=csi-hostpath-driver         |                        |         |         |                     |                     |
	|         | --addons=gcp-auth                    |                        |         |         |                     |                     |
	|         | --addons=cloud-spanner               |                        |         |         |                     |                     |
	|         | --addons=inspektor-gadget            |                        |         |         |                     |                     |
	|         | --addons=storage-provisioner-rancher |                        |         |         |                     |                     |
	|         | --addons=nvidia-device-plugin        |                        |         |         |                     |                     |
	|         | --addons=yakd --addons=volcano       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --addons=ingress                     |                        |         |         |                     |                     |
	|         | --addons=ingress-dns                 |                        |         |         |                     |                     |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:29:55
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:29:55.756900 2064308 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:29:55.757118 2064308 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:29:55.757146 2064308 out.go:358] Setting ErrFile to fd 2...
	I0916 10:29:55.757164 2064308 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:29:55.757443 2064308 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:29:55.757918 2064308 out.go:352] Setting JSON to false
	I0916 10:29:55.758950 2064308 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":137538,"bootTime":1726345058,"procs":192,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:29:55.759050 2064308 start.go:139] virtualization:  
	I0916 10:29:55.762450 2064308 out.go:177] * [addons-451841] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:29:55.765218 2064308 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:29:55.765320 2064308 notify.go:220] Checking for updates...
	I0916 10:29:55.771607 2064308 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:29:55.774426 2064308 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:29:55.777761 2064308 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:29:55.780330 2064308 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:29:55.782904 2064308 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:29:55.785688 2064308 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:29:55.807382 2064308 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:29:55.807515 2064308 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:29:55.863178 2064308 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:49 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:29:55.853088898 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:29:55.863303 2064308 docker.go:318] overlay module found
	I0916 10:29:55.867792 2064308 out.go:177] * Using the docker driver based on user configuration
	I0916 10:29:55.870461 2064308 start.go:297] selected driver: docker
	I0916 10:29:55.870476 2064308 start.go:901] validating driver "docker" against <nil>
	I0916 10:29:55.870490 2064308 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:29:55.871367 2064308 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:29:55.922454 2064308 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:49 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:29:55.912678011 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:29:55.922666 2064308 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:29:55.922995 2064308 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:29:55.925501 2064308 out.go:177] * Using Docker driver with root privileges
	I0916 10:29:55.928402 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:29:55.928468 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:29:55.928481 2064308 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:29:55.928561 2064308 start.go:340] cluster config:
	{Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime
:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHA
uthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:29:55.931349 2064308 out.go:177] * Starting "addons-451841" primary control-plane node in "addons-451841" cluster
	I0916 10:29:55.933847 2064308 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:29:55.936549 2064308 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:29:55.939027 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:29:55.939075 2064308 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:29:55.939087 2064308 cache.go:56] Caching tarball of preloaded images
	I0916 10:29:55.939127 2064308 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:29:55.939172 2064308 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:29:55.939183 2064308 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:29:55.939554 2064308 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json ...
	I0916 10:29:55.939585 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json: {Name:mk4b86ccd0e04a15f77246bcc432382e6ef83bd3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:29:55.955829 2064308 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:29:55.955957 2064308 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:29:55.955999 2064308 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:29:55.956009 2064308 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:29:55.956017 2064308 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:29:55.956025 2064308 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:30:13.033213 2064308 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:30:13.033255 2064308 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:30:13.033286 2064308 start.go:360] acquireMachinesLock for addons-451841: {Name:mk3e70771a060125a26a792bbbf3ad5672ad97bd Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:30:13.033421 2064308 start.go:364] duration metric: took 111.614µs to acquireMachinesLock for "addons-451841"
	I0916 10:30:13.033454 2064308 start.go:93] Provisioning new machine with config: &{Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:min
ikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:fa
lse CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:30:13.033622 2064308 start.go:125] createHost starting for "" (driver="docker")
	I0916 10:30:13.035916 2064308 out.go:235] * Creating docker container (CPUs=2, Memory=4000MB) ...
	I0916 10:30:13.036188 2064308 start.go:159] libmachine.API.Create for "addons-451841" (driver="docker")
	I0916 10:30:13.036228 2064308 client.go:168] LocalClient.Create starting
	I0916 10:30:13.036363 2064308 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 10:30:13.386329 2064308 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 10:30:13.561829 2064308 cli_runner.go:164] Run: docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 10:30:13.576129 2064308 cli_runner.go:211] docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 10:30:13.576212 2064308 network_create.go:284] running [docker network inspect addons-451841] to gather additional debugging logs...
	I0916 10:30:13.576235 2064308 cli_runner.go:164] Run: docker network inspect addons-451841
	W0916 10:30:13.591552 2064308 cli_runner.go:211] docker network inspect addons-451841 returned with exit code 1
	I0916 10:30:13.591606 2064308 network_create.go:287] error running [docker network inspect addons-451841]: docker network inspect addons-451841: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network addons-451841 not found
	I0916 10:30:13.591621 2064308 network_create.go:289] output of [docker network inspect addons-451841]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network addons-451841 not found
	
	** /stderr **
	I0916 10:30:13.591720 2064308 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:30:13.608306 2064308 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001aeacb0}
	I0916 10:30:13.608356 2064308 network_create.go:124] attempt to create docker network addons-451841 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0916 10:30:13.608420 2064308 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-451841 addons-451841
	I0916 10:30:13.683378 2064308 network_create.go:108] docker network addons-451841 192.168.49.0/24 created
	I0916 10:30:13.683411 2064308 kic.go:121] calculated static IP "192.168.49.2" for the "addons-451841" container
	I0916 10:30:13.683492 2064308 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:30:13.700184 2064308 cli_runner.go:164] Run: docker volume create addons-451841 --label name.minikube.sigs.k8s.io=addons-451841 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:30:13.718068 2064308 oci.go:103] Successfully created a docker volume addons-451841
	I0916 10:30:13.718179 2064308 cli_runner.go:164] Run: docker run --rm --name addons-451841-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --entrypoint /usr/bin/test -v addons-451841:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:30:15.421383 2064308 cli_runner.go:217] Completed: docker run --rm --name addons-451841-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --entrypoint /usr/bin/test -v addons-451841:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib: (1.703150378s)
	I0916 10:30:15.421417 2064308 oci.go:107] Successfully prepared a docker volume addons-451841
	I0916 10:30:15.421439 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:30:15.421458 2064308 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:30:15.421522 2064308 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-451841:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:30:19.320511 2064308 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-451841:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (3.898937148s)
	I0916 10:30:19.320548 2064308 kic.go:203] duration metric: took 3.899086612s to extract preloaded images to volume ...
	W0916 10:30:19.320695 2064308 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:30:19.320803 2064308 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:30:19.371670 2064308 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-451841 --name addons-451841 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-451841 --network addons-451841 --ip 192.168.49.2 --volume addons-451841:/var --security-opt apparmor=unconfined --memory=4000mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:30:19.674459 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Running}}
	I0916 10:30:19.700795 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:19.725169 2064308 cli_runner.go:164] Run: docker exec addons-451841 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:30:19.777409 2064308 oci.go:144] the created container "addons-451841" has a running status.
	I0916 10:30:19.777438 2064308 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa...
	I0916 10:30:20.426549 2064308 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:30:20.459111 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:20.485764 2064308 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:30:20.485788 2064308 kic_runner.go:114] Args: [docker exec --privileged addons-451841 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:30:20.553044 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:20.584488 2064308 machine.go:93] provisionDockerMachine start ...
	I0916 10:30:20.584585 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.604705 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.605002 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.605024 2064308 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:30:20.750295 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-451841
	
	I0916 10:30:20.750323 2064308 ubuntu.go:169] provisioning hostname "addons-451841"
	I0916 10:30:20.750394 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.772671 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.772910 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.772922 2064308 main.go:141] libmachine: About to run SSH command:
	sudo hostname addons-451841 && echo "addons-451841" | sudo tee /etc/hostname
	I0916 10:30:20.923316 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-451841
	
	I0916 10:30:20.923448 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.940021 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.940274 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.940298 2064308 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\saddons-451841' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-451841/g' /etc/hosts;
				else 
					echo '127.0.1.1 addons-451841' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:30:21.087110 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:30:21.087184 2064308 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:30:21.087263 2064308 ubuntu.go:177] setting up certificates
	I0916 10:30:21.087293 2064308 provision.go:84] configureAuth start
	I0916 10:30:21.087450 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.105254 2064308 provision.go:143] copyHostCerts
	I0916 10:30:21.105342 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:30:21.105468 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:30:21.105537 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:30:21.105585 2064308 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.addons-451841 san=[127.0.0.1 192.168.49.2 addons-451841 localhost minikube]
	I0916 10:30:21.497343 2064308 provision.go:177] copyRemoteCerts
	I0916 10:30:21.497413 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:30:21.497456 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.514957 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.611658 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:30:21.636890 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:30:21.662172 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:30:21.686808 2064308 provision.go:87] duration metric: took 599.477164ms to configureAuth
	I0916 10:30:21.686873 2064308 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:30:21.687116 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:21.687133 2064308 machine.go:96] duration metric: took 1.102625588s to provisionDockerMachine
	I0916 10:30:21.687141 2064308 client.go:171] duration metric: took 8.650903893s to LocalClient.Create
	I0916 10:30:21.687161 2064308 start.go:167] duration metric: took 8.650974974s to libmachine.API.Create "addons-451841"
	I0916 10:30:21.687171 2064308 start.go:293] postStartSetup for "addons-451841" (driver="docker")
	I0916 10:30:21.687182 2064308 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:30:21.687249 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:30:21.687299 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.706431 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.804065 2064308 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:30:21.807409 2064308 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:30:21.807450 2064308 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:30:21.807462 2064308 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:30:21.807470 2064308 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:30:21.807482 2064308 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:30:21.807551 2064308 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:30:21.807581 2064308 start.go:296] duration metric: took 120.403063ms for postStartSetup
	I0916 10:30:21.807904 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.824820 2064308 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json ...
	I0916 10:30:21.825120 2064308 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:30:21.825171 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.841557 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.935711 2064308 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:30:21.940289 2064308 start.go:128] duration metric: took 8.906649729s to createHost
	I0916 10:30:21.940328 2064308 start.go:83] releasing machines lock for "addons-451841", held for 8.906892895s
	I0916 10:30:21.940401 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.957512 2064308 ssh_runner.go:195] Run: cat /version.json
	I0916 10:30:21.957582 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.957842 2064308 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:30:21.957901 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.986070 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.992358 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:22.209233 2064308 ssh_runner.go:195] Run: systemctl --version
	I0916 10:30:22.213896 2064308 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:30:22.218111 2064308 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:30:22.243931 2064308 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:30:22.244032 2064308 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:30:22.274074 2064308 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:30:22.274104 2064308 start.go:495] detecting cgroup driver to use...
	I0916 10:30:22.274139 2064308 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:30:22.274194 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:30:22.287113 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:30:22.299302 2064308 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:30:22.299412 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:30:22.313515 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:30:22.327839 2064308 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:30:22.409410 2064308 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:30:22.494962 2064308 docker.go:233] disabling docker service ...
	I0916 10:30:22.495100 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:30:22.515205 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:30:22.527495 2064308 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:30:22.611444 2064308 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:30:22.705471 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:30:22.717496 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:30:22.735435 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:30:22.746124 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:30:22.757226 2064308 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:30:22.757299 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:30:22.767541 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:30:22.779039 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:30:22.788821 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:30:22.799244 2064308 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:30:22.808704 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:30:22.820713 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:30:22.831851 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:30:22.842394 2064308 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:30:22.851545 2064308 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:30:22.860424 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:22.961475 2064308 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:30:23.100987 2064308 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:30:23.101138 2064308 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:30:23.105001 2064308 start.go:563] Will wait 60s for crictl version
	I0916 10:30:23.105079 2064308 ssh_runner.go:195] Run: which crictl
	I0916 10:30:23.108696 2064308 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:30:23.154724 2064308 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:30:23.154812 2064308 ssh_runner.go:195] Run: containerd --version
	I0916 10:30:23.179902 2064308 ssh_runner.go:195] Run: containerd --version
	I0916 10:30:23.208730 2064308 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:30:23.210246 2064308 cli_runner.go:164] Run: docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:30:23.225302 2064308 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:30:23.229071 2064308 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:30:23.240048 2064308 kubeadm.go:883] updating cluster {Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNa
mes:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false Cus
tomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:30:23.240172 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:30:23.240246 2064308 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:30:23.276242 2064308 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:30:23.276266 2064308 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:30:23.276331 2064308 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:30:23.312895 2064308 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:30:23.312924 2064308 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:30:23.312933 2064308 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 containerd true true} ...
	I0916 10:30:23.313028 2064308 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=addons-451841 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:30:23.313095 2064308 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:30:23.348552 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:30:23.348577 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:30:23.348587 2064308 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:30:23.348609 2064308 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-451841 NodeName:addons-451841 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc
/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:30:23.348742 2064308 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "addons-451841"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:30:23.348817 2064308 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:30:23.357634 2064308 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:30:23.357705 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:30:23.366468 2064308 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:30:23.385942 2064308 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:30:23.404422 2064308 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2167 bytes)
	I0916 10:30:23.422831 2064308 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:30:23.426382 2064308 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:30:23.437337 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:23.533359 2064308 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:30:23.547523 2064308 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841 for IP: 192.168.49.2
	I0916 10:30:23.547546 2064308 certs.go:194] generating shared ca certs ...
	I0916 10:30:23.547562 2064308 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:23.548238 2064308 certs.go:240] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:30:24.056004 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt ...
	I0916 10:30:24.056043 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt: {Name:mk8fa0c4ced40ca68ac874100ce374f588dfea0b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.056261 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key ...
	I0916 10:30:24.056276 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key: {Name:mk04aab579c9f6bfd22c8de7442d64e7264cf4f3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.056381 2064308 certs.go:240] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:30:24.923761 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt ...
	I0916 10:30:24.923793 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt: {Name:mke93617c0d085600c816f9e0c290a24fbe662eb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.923996 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key ...
	I0916 10:30:24.924009 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key: {Name:mk45200538cf11f718e98e7cfef8cbfcd0dafedf Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.924099 2064308 certs.go:256] generating profile certs ...
	I0916 10:30:24.924161 2064308 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key
	I0916 10:30:24.924189 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt with IP's: []
	I0916 10:30:25.053524 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt ...
	I0916 10:30:25.053557 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: {Name:mk37fa0b7d204f82c8af039a0f580deae8708ef5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.053750 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key ...
	I0916 10:30:25.053764 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key: {Name:mkdb13343be22c0a0f72ff55f3a3cbca00768e68 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.053853 2064308 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707
	I0916 10:30:25.053877 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2]
	I0916 10:30:25.726904 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 ...
	I0916 10:30:25.726937 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707: {Name:mkf1dd897eefb9f7916ec8408e62b2271e638207 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.727141 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707 ...
	I0916 10:30:25.727156 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707: {Name:mkfbc7b493bc2e7d0b9e7f941111c820f07e3e82 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.727261 2064308 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt
	I0916 10:30:25.727361 2064308 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key
	I0916 10:30:25.727418 2064308 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key
	I0916 10:30:25.727439 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt with IP's: []
	I0916 10:30:26.011801 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt ...
	I0916 10:30:26.011842 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt: {Name:mkb21e9e32e986ac8dbc5fbe6c0db427fdb116ee Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:26.012049 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key ...
	I0916 10:30:26.012065 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key: {Name:mk95b366411d26459b0f1e143cac6384a51d5dfb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:26.012320 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:30:26.012368 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:30:26.012401 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:30:26.012429 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:30:26.013083 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:30:26.039152 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:30:26.064366 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:30:26.093086 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:30:26.116868 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
	I0916 10:30:26.141663 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:30:26.166725 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:30:26.191142 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:30:26.214975 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:30:26.238979 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:30:26.256459 2064308 ssh_runner.go:195] Run: openssl version
	I0916 10:30:26.262089 2064308 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:30:26.271478 2064308 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.274966 2064308 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.275035 2064308 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.281888 2064308 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:30:26.291290 2064308 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:30:26.294471 2064308 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:30:26.294534 2064308 kubeadm.go:392] StartCluster: {Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames
:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false Custom
QemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:30:26.294629 2064308 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:30:26.294715 2064308 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:30:26.332648 2064308 cri.go:89] found id: ""
	I0916 10:30:26.332740 2064308 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:30:26.341585 2064308 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:30:26.350524 2064308 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 10:30:26.350588 2064308 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:30:26.359218 2064308 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 10:30:26.359240 2064308 kubeadm.go:157] found existing configuration files:
	
	I0916 10:30:26.359319 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 10:30:26.368227 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 10:30:26.368297 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 10:30:26.377781 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 10:30:26.386494 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 10:30:26.386567 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 10:30:26.394932 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 10:30:26.403622 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 10:30:26.403687 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:30:26.412005 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 10:30:26.420862 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 10:30:26.420957 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:30:26.429543 2064308 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 10:30:26.471767 2064308 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 10:30:26.472019 2064308 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 10:30:26.498827 2064308 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 10:30:26.498904 2064308 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 10:30:26.498947 2064308 kubeadm.go:310] OS: Linux
	I0916 10:30:26.498998 2064308 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 10:30:26.499052 2064308 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 10:30:26.499103 2064308 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 10:30:26.499154 2064308 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 10:30:26.499218 2064308 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 10:30:26.499270 2064308 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 10:30:26.499320 2064308 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 10:30:26.499375 2064308 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 10:30:26.499426 2064308 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 10:30:26.577650 2064308 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 10:30:26.577762 2064308 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 10:30:26.577859 2064308 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 10:30:26.583045 2064308 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 10:30:26.586527 2064308 out.go:235]   - Generating certificates and keys ...
	I0916 10:30:26.586988 2064308 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 10:30:26.587103 2064308 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 10:30:26.754645 2064308 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 10:30:27.554793 2064308 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 10:30:28.039725 2064308 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 10:30:28.690015 2064308 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 10:30:29.764620 2064308 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 10:30:29.764907 2064308 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [addons-451841 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:30:30.341274 2064308 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 10:30:30.342274 2064308 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [addons-451841 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:30:30.576739 2064308 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 10:30:31.765912 2064308 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 10:30:33.601844 2064308 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 10:30:33.602129 2064308 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 10:30:34.584274 2064308 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 10:30:35.213888 2064308 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 10:30:35.990415 2064308 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 10:30:36.165269 2064308 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 10:30:36.564139 2064308 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 10:30:36.565009 2064308 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 10:30:36.568128 2064308 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 10:30:36.570826 2064308 out.go:235]   - Booting up control plane ...
	I0916 10:30:36.570944 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 10:30:36.571026 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 10:30:36.571834 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 10:30:36.583080 2064308 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 10:30:36.589082 2064308 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 10:30:36.589162 2064308 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 10:30:36.685676 2064308 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 10:30:36.685796 2064308 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 10:30:37.686643 2064308 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00127007s
	I0916 10:30:37.686760 2064308 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 10:30:44.689772 2064308 kubeadm.go:310] [api-check] The API server is healthy after 7.003101119s
	I0916 10:30:44.709044 2064308 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 10:30:44.727931 2064308 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 10:30:44.754458 2064308 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 10:30:44.754737 2064308 kubeadm.go:310] [mark-control-plane] Marking the node addons-451841 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 10:30:44.766739 2064308 kubeadm.go:310] [bootstrap-token] Using token: dx9pov.rexyyitopznv0w4v
	I0916 10:30:44.769416 2064308 out.go:235]   - Configuring RBAC rules ...
	I0916 10:30:44.769548 2064308 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 10:30:44.776785 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 10:30:44.785617 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 10:30:44.789704 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 10:30:44.794016 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 10:30:44.798127 2064308 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 10:30:45.099673 2064308 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 10:30:45.534575 2064308 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 10:30:46.098271 2064308 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 10:30:46.099422 2064308 kubeadm.go:310] 
	I0916 10:30:46.099510 2064308 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 10:30:46.099519 2064308 kubeadm.go:310] 
	I0916 10:30:46.099624 2064308 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 10:30:46.099640 2064308 kubeadm.go:310] 
	I0916 10:30:46.099673 2064308 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 10:30:46.099733 2064308 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 10:30:46.099783 2064308 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 10:30:46.099787 2064308 kubeadm.go:310] 
	I0916 10:30:46.099841 2064308 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 10:30:46.099846 2064308 kubeadm.go:310] 
	I0916 10:30:46.099898 2064308 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 10:30:46.099903 2064308 kubeadm.go:310] 
	I0916 10:30:46.099959 2064308 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 10:30:46.100036 2064308 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 10:30:46.100108 2064308 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 10:30:46.100113 2064308 kubeadm.go:310] 
	I0916 10:30:46.100201 2064308 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 10:30:46.100280 2064308 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 10:30:46.100285 2064308 kubeadm.go:310] 
	I0916 10:30:46.100377 2064308 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token dx9pov.rexyyitopznv0w4v \
	I0916 10:30:46.100482 2064308 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 10:30:46.100503 2064308 kubeadm.go:310] 	--control-plane 
	I0916 10:30:46.100507 2064308 kubeadm.go:310] 
	I0916 10:30:46.100599 2064308 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 10:30:46.100604 2064308 kubeadm.go:310] 
	I0916 10:30:46.100684 2064308 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token dx9pov.rexyyitopznv0w4v \
	I0916 10:30:46.100792 2064308 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 10:30:46.104209 2064308 kubeadm.go:310] W0916 10:30:26.468492    1025 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:30:46.104508 2064308 kubeadm.go:310] W0916 10:30:26.469422    1025 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:30:46.104733 2064308 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 10:30:46.104841 2064308 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 10:30:46.104863 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:30:46.104872 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:30:46.107753 2064308 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:30:46.110419 2064308 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:30:46.114304 2064308 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:30:46.114327 2064308 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:30:46.132060 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 10:30:46.405649 2064308 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:30:46.405772 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:46.405844 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-451841 minikube.k8s.io/updated_at=2024_09_16T10_30_46_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=addons-451841 minikube.k8s.io/primary=true
	I0916 10:30:46.544610 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:46.544668 2064308 ops.go:34] apiserver oom_adj: -16
	I0916 10:30:47.045343 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:47.544713 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:48.045593 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:48.545262 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:49.044804 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:49.545373 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:50.045197 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:50.206616 2064308 kubeadm.go:1113] duration metric: took 3.800886781s to wait for elevateKubeSystemPrivileges
	I0916 10:30:50.206650 2064308 kubeadm.go:394] duration metric: took 23.912135022s to StartCluster
	I0916 10:30:50.206760 2064308 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:50.206888 2064308 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:30:50.207291 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:50.207495 2064308 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:30:50.207664 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 10:30:50.207912 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:50.207954 2064308 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
	I0916 10:30:50.208037 2064308 addons.go:69] Setting yakd=true in profile "addons-451841"
	I0916 10:30:50.208056 2064308 addons.go:234] Setting addon yakd=true in "addons-451841"
	I0916 10:30:50.208079 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.208590 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.209289 2064308 addons.go:69] Setting metrics-server=true in profile "addons-451841"
	I0916 10:30:50.209312 2064308 addons.go:234] Setting addon metrics-server=true in "addons-451841"
	I0916 10:30:50.209362 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.209903 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.210207 2064308 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-451841"
	I0916 10:30:50.210240 2064308 addons.go:234] Setting addon nvidia-device-plugin=true in "addons-451841"
	I0916 10:30:50.210263 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.210767 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.218758 2064308 addons.go:69] Setting registry=true in profile "addons-451841"
	I0916 10:30:50.218798 2064308 addons.go:234] Setting addon registry=true in "addons-451841"
	I0916 10:30:50.218832 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.219427 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.219602 2064308 addons.go:69] Setting cloud-spanner=true in profile "addons-451841"
	I0916 10:30:50.219647 2064308 addons.go:234] Setting addon cloud-spanner=true in "addons-451841"
	I0916 10:30:50.219685 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.221722 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.222266 2064308 addons.go:69] Setting storage-provisioner=true in profile "addons-451841"
	I0916 10:30:50.222288 2064308 addons.go:234] Setting addon storage-provisioner=true in "addons-451841"
	I0916 10:30:50.222314 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.222854 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.243982 2064308 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-451841"
	I0916 10:30:50.244056 2064308 addons.go:234] Setting addon csi-hostpath-driver=true in "addons-451841"
	I0916 10:30:50.244103 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.244878 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.250996 2064308 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-451841"
	I0916 10:30:50.251033 2064308 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-451841"
	I0916 10:30:50.251403 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.262479 2064308 addons.go:69] Setting volcano=true in profile "addons-451841"
	I0916 10:30:50.262526 2064308 addons.go:234] Setting addon volcano=true in "addons-451841"
	I0916 10:30:50.262567 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.263124 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.310432 2064308 addons.go:69] Setting default-storageclass=true in profile "addons-451841"
	I0916 10:30:50.310537 2064308 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-451841"
	I0916 10:30:50.311117 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.312245 2064308 addons.go:69] Setting volumesnapshots=true in profile "addons-451841"
	I0916 10:30:50.312377 2064308 addons.go:234] Setting addon volumesnapshots=true in "addons-451841"
	I0916 10:30:50.312448 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.313757 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.327865 2064308 addons.go:69] Setting gcp-auth=true in profile "addons-451841"
	I0916 10:30:50.327962 2064308 mustload.go:65] Loading cluster: addons-451841
	I0916 10:30:50.330380 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:50.330866 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.331146 2064308 out.go:177] * Verifying Kubernetes components...
	I0916 10:30:50.334941 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:50.349812 2064308 addons.go:69] Setting ingress=true in profile "addons-451841"
	I0916 10:30:50.349850 2064308 addons.go:234] Setting addon ingress=true in "addons-451841"
	I0916 10:30:50.349897 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.350438 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.350648 2064308 addons.go:234] Setting addon storage-provisioner-rancher=true in "addons-451841"
	I0916 10:30:50.350723 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.351151 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.369853 2064308 addons.go:69] Setting ingress-dns=true in profile "addons-451841"
	I0916 10:30:50.369893 2064308 addons.go:234] Setting addon ingress-dns=true in "addons-451841"
	I0916 10:30:50.369937 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.370407 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.375867 2064308 out.go:177]   - Using image docker.io/registry:2.8.3
	I0916 10:30:50.382808 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.6
	I0916 10:30:50.384082 2064308 addons.go:69] Setting inspektor-gadget=true in profile "addons-451841"
	I0916 10:30:50.384111 2064308 addons.go:234] Setting addon inspektor-gadget=true in "addons-451841"
	I0916 10:30:50.384143 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.384714 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.421644 2064308 out.go:177]   - Using image registry.k8s.io/metrics-server/metrics-server:v0.7.2
	I0916 10:30:50.424401 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0916 10:30:50.424443 2064308 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0916 10:30:50.424517 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.438309 2064308 out.go:177]   - Using image nvcr.io/nvidia/k8s-device-plugin:v0.16.2
	I0916 10:30:50.438567 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-rc.yaml
	I0916 10:30:50.438585 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (860 bytes)
	I0916 10:30:50.438646 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.439842 2064308 out.go:177]   - Using image docker.io/marcnuri/yakd:0.0.5
	I0916 10:30:50.440236 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-webhook-manager:v1.9.0
	I0916 10:30:50.440402 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.462370 2064308 addons.go:234] Setting addon default-storageclass=true in "addons-451841"
	I0916 10:30:50.462409 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.463191 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.463463 2064308 addons.go:431] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:30:50.466889 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
	I0916 10:30:50.467021 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.474834 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-ns.yaml
	I0916 10:30:50.474857 2064308 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
	I0916 10:30:50.474919 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.484574 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:30:50.485713 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
	I0916 10:30:50.508488 2064308 out.go:177]   - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.23
	I0916 10:30:50.525937 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-controller-manager:v1.9.0
	I0916 10:30:50.526169 2064308 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:30:50.526183 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:30:50.526247 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.542222 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
	I0916 10:30:50.543269 2064308 addons.go:431] installing /etc/kubernetes/addons/deployment.yaml
	I0916 10:30:50.543418 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
	I0916 10:30:50.543483 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.563839 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-scheduler:v1.9.0
	I0916 10:30:50.567954 2064308 addons.go:431] installing /etc/kubernetes/addons/volcano-deployment.yaml
	I0916 10:30:50.567983 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volcano-deployment.yaml (434001 bytes)
	I0916 10:30:50.568053 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.583888 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.587279 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/minikube-ingress-dns:0.0.3
	I0916 10:30:50.587486 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/controller:v1.11.2
	I0916 10:30:50.589757 2064308 out.go:177]   - Using image docker.io/busybox:stable
	I0916 10:30:50.589894 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
	I0916 10:30:50.592333 2064308 addons.go:431] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:30:50.592357 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2442 bytes)
	I0916 10:30:50.592588 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.594469 2064308 out.go:177]   - Using image docker.io/rancher/local-path-provisioner:v0.0.22
	I0916 10:30:50.594639 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:30:50.596571 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
	I0916 10:30:50.596784 2064308 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:30:50.596798 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
	I0916 10:30:50.596863 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.628847 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:30:50.631659 2064308 addons.go:431] installing /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:30:50.631684 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
	I0916 10:30:50.631748 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.645470 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.650239 2064308 out.go:177]   - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0
	I0916 10:30:50.650364 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
	I0916 10:30:50.650401 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
	I0916 10:30:50.652227 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.653039 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-namespace.yaml
	I0916 10:30:50.654718 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-namespace.yaml --> /etc/kubernetes/addons/ig-namespace.yaml (55 bytes)
	I0916 10:30:50.654790 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.661463 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
	I0916 10:30:50.661708 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.654527 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
	I0916 10:30:50.662209 2064308 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
	I0916 10:30:50.662349 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.674173 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
	I0916 10:30:50.676994 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
	I0916 10:30:50.680275 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
	I0916 10:30:50.680305 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
	I0916 10:30:50.680378 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.691037 2064308 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:30:50.691057 2064308 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:30:50.691123 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.774282 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.780046 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.807312 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.827826 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.831006 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.853363 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.867169 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.875051 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.875081 2064308 retry.go:31] will retry after 209.079202ms: ssh: handshake failed: EOF
	I0916 10:30:50.875514 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.878034 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.878076 2064308 retry.go:31] will retry after 358.329045ms: ssh: handshake failed: EOF
	I0916 10:30:50.878970 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.891671 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.913115 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.913144 2064308 retry.go:31] will retry after 291.220359ms: ssh: handshake failed: EOF
	W0916 10:30:51.085514 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:51.085558 2064308 retry.go:31] will retry after 406.090408ms: ssh: handshake failed: EOF
	I0916 10:30:51.380959 2064308 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml": (1.173254923s)
	I0916 10:30:51.381043 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:30:51.381158 2064308 ssh_runner.go:235] Completed: sudo systemctl daemon-reload: (1.046146925s)
	I0916 10:30:51.381191 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 10:30:51.381193 2064308 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:30:51.393457 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-serviceaccount.yaml
	I0916 10:30:51.393478 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-serviceaccount.yaml --> /etc/kubernetes/addons/ig-serviceaccount.yaml (80 bytes)
	I0916 10:30:51.405074 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:30:51.536141 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:30:51.553523 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-hostpath.yaml
	I0916 10:30:51.553553 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
	I0916 10:30:51.664299 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-role.yaml
	I0916 10:30:51.664331 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-role.yaml --> /etc/kubernetes/addons/ig-role.yaml (210 bytes)
	I0916 10:30:51.694553 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-sa.yaml
	I0916 10:30:51.694580 2064308 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
	I0916 10:30:51.695380 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:30:51.703369 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-svc.yaml
	I0916 10:30:51.703394 2064308 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
	I0916 10:30:51.711436 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0916 10:30:51.711460 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
	I0916 10:30:51.716209 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml
	I0916 10:30:51.833745 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
	I0916 10:30:51.872114 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0916 10:30:51.872156 2064308 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0916 10:30:51.879573 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
	I0916 10:30:51.879603 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
	I0916 10:30:51.894115 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:30:51.927534 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-rolebinding.yaml
	I0916 10:30:51.927573 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-rolebinding.yaml --> /etc/kubernetes/addons/ig-rolebinding.yaml (244 bytes)
	I0916 10:30:51.967967 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-crb.yaml
	I0916 10:30:51.967997 2064308 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
	I0916 10:30:51.987647 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:30:51.987672 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
	I0916 10:30:52.018799 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
	I0916 10:30:52.018835 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
	I0916 10:30:52.040829 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:30:52.040863 2064308 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	I0916 10:30:52.062309 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
	I0916 10:30:52.062358 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
	I0916 10:30:52.141020 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrole.yaml
	I0916 10:30:52.141055 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrole.yaml --> /etc/kubernetes/addons/ig-clusterrole.yaml (1485 bytes)
	I0916 10:30:52.150339 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:30:52.156143 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-svc.yaml
	I0916 10:30:52.156182 2064308 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
	I0916 10:30:52.267727 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
	I0916 10:30:52.267754 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
	I0916 10:30:52.278358 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
	I0916 10:30:52.278410 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
	I0916 10:30:52.295260 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:30:52.315396 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:30:52.396988 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrolebinding.yaml
	I0916 10:30:52.397029 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrolebinding.yaml --> /etc/kubernetes/addons/ig-clusterrolebinding.yaml (274 bytes)
	I0916 10:30:52.414014 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:30:52.414040 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
	I0916 10:30:52.514419 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
	I0916 10:30:52.514447 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
	I0916 10:30:52.534199 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
	I0916 10:30:52.534239 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
	I0916 10:30:52.671597 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (1.290515457s)
	I0916 10:30:52.715081 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-crd.yaml
	I0916 10:30:52.715111 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-crd.yaml --> /etc/kubernetes/addons/ig-crd.yaml (5216 bytes)
	I0916 10:30:52.719191 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:30:52.832284 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
	I0916 10:30:52.832313 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
	I0916 10:30:52.924488 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
	I0916 10:30:52.924521 2064308 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
	I0916 10:30:53.168769 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:30:53.168802 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-daemonset.yaml (7735 bytes)
	I0916 10:30:53.177620 2064308 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (1.796357981s)
	I0916 10:30:53.177657 2064308 start.go:971] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
	I0916 10:30:53.177732 2064308 ssh_runner.go:235] Completed: sudo systemctl start kubelet: (1.79643144s)
	I0916 10:30:53.179507 2064308 node_ready.go:35] waiting up to 6m0s for node "addons-451841" to be "Ready" ...
	I0916 10:30:53.184404 2064308 node_ready.go:49] node "addons-451841" has status "Ready":"True"
	I0916 10:30:53.184443 2064308 node_ready.go:38] duration metric: took 4.710029ms for node "addons-451841" to be "Ready" ...
	I0916 10:30:53.184458 2064308 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:30:53.197525 2064308 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace to be "Ready" ...
	I0916 10:30:53.269899 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
	I0916 10:30:53.269941 2064308 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
	I0916 10:30:53.282557 2064308 addons.go:431] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:30:53.282590 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
	I0916 10:30:53.466493 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
	I0916 10:30:53.466519 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
	I0916 10:30:53.471643 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:30:53.602578 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:30:53.683185 2064308 kapi.go:214] "coredns" deployment in "kube-system" namespace and "addons-451841" context rescaled to 1 replicas
	I0916 10:30:53.701295 2064308 pod_ready.go:98] error getting pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bvmss" not found
	I0916 10:30:53.701323 2064308 pod_ready.go:82] duration metric: took 503.765362ms for pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace to be "Ready" ...
	E0916 10:30:53.701335 2064308 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bvmss" not found
	I0916 10:30:53.701342 2064308 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace to be "Ready" ...
	I0916 10:30:53.722187 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
	I0916 10:30:53.722214 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
	I0916 10:30:54.162813 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:30:54.162856 2064308 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
	I0916 10:30:54.507449 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:30:55.304651 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (3.899501244s)
	I0916 10:30:55.634996 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (3.939582165s)
	I0916 10:30:55.635110 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (4.098941534s)
	I0916 10:30:55.711983 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:30:57.666996 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
	I0916 10:30:57.667089 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:57.696419 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:57.712916 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:30:58.304674 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
	I0916 10:30:58.423037 2064308 addons.go:234] Setting addon gcp-auth=true in "addons-451841"
	I0916 10:30:58.423145 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:58.423647 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:58.453963 2064308 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
	I0916 10:30:58.454022 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:58.488418 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:59.724111 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:01.085964 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml: (9.369716418s)
	I0916 10:31:01.086088 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (9.252309206s)
	I0916 10:31:01.086143 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (9.191998071s)
	I0916 10:31:01.086179 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (8.93580956s)
	I0916 10:31:01.086966 2064308 addons.go:475] Verifying addon registry=true in "addons-451841"
	I0916 10:31:01.086280 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (8.790993869s)
	I0916 10:31:01.087161 2064308 addons.go:475] Verifying addon ingress=true in "addons-451841"
	I0916 10:31:01.086364 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (8.367136002s)
	I0916 10:31:01.086423 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml: (7.614752608s)
	I0916 10:31:01.086494 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (7.483872887s)
	I0916 10:31:01.086607 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (8.771185269s)
	I0916 10:31:01.087690 2064308 addons.go:475] Verifying addon metrics-server=true in "addons-451841"
	W0916 10:31:01.087784 2064308 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:31:01.087807 2064308 retry.go:31] will retry after 241.995667ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:31:01.089709 2064308 out.go:177] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
	
		minikube -p addons-451841 service yakd-dashboard -n yakd-dashboard
	
	I0916 10:31:01.089714 2064308 out.go:177] * Verifying ingress addon...
	I0916 10:31:01.089782 2064308 out.go:177] * Verifying registry addon...
	I0916 10:31:01.092615 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
	I0916 10:31:01.093670 2064308 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
	I0916 10:31:01.146629 2064308 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
	I0916 10:31:01.146661 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:01.147988 2064308 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
	I0916 10:31:01.148013 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:01.330778 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:31:01.607432 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:01.608116 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:01.783267 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (7.275754054s)
	I0916 10:31:01.783417 2064308 addons.go:475] Verifying addon csi-hostpath-driver=true in "addons-451841"
	I0916 10:31:01.783367 2064308 ssh_runner.go:235] Completed: cat /var/lib/minikube/google_application_credentials.json: (3.329378043s)
	I0916 10:31:01.785766 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:31:01.785796 2064308 out.go:177] * Verifying csi-hostpath-driver addon...
	I0916 10:31:01.788664 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.2
	I0916 10:31:01.789894 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
	I0916 10:31:01.794958 2064308 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0916 10:31:01.795006 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:01.797295 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
	I0916 10:31:01.797332 2064308 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
	I0916 10:31:01.893997 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-service.yaml
	I0916 10:31:01.894071 2064308 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
	I0916 10:31:01.937742 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:31:01.937810 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
	I0916 10:31:01.987240 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:31:02.097286 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:02.100875 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:02.209340 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:02.305635 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:02.597370 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:02.599723 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:02.795942 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:03.100397 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:03.103196 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:03.312002 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:03.374850 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml: (1.387566006s)
	I0916 10:31:03.375988 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (2.045090366s)
	I0916 10:31:03.378127 2064308 addons.go:475] Verifying addon gcp-auth=true in "addons-451841"
	I0916 10:31:03.381777 2064308 out.go:177] * Verifying gcp-auth addon...
	I0916 10:31:03.384298 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
	I0916 10:31:03.406084 2064308 kapi.go:86] Found 0 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:31:03.599867 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:03.600481 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:03.720241 2064308 pod_ready.go:93] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.720276 2064308 pod_ready.go:82] duration metric: took 10.018926311s for pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.720289 2064308 pod_ready.go:79] waiting up to 6m0s for pod "etcd-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.746042 2064308 pod_ready.go:93] pod "etcd-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.746067 2064308 pod_ready.go:82] duration metric: took 25.771231ms for pod "etcd-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.746081 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.752533 2064308 pod_ready.go:93] pod "kube-apiserver-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.752559 2064308 pod_ready.go:82] duration metric: took 6.470582ms for pod "kube-apiserver-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.752571 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.758462 2064308 pod_ready.go:93] pod "kube-controller-manager-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.758495 2064308 pod_ready.go:82] duration metric: took 5.916018ms for pod "kube-controller-manager-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.758507 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tltkn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.765336 2064308 pod_ready.go:93] pod "kube-proxy-tltkn" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.765369 2064308 pod_ready.go:82] duration metric: took 6.854119ms for pod "kube-proxy-tltkn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.765382 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.795811 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:04.099344 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:04.100673 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:04.104903 2064308 pod_ready.go:93] pod "kube-scheduler-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:04.104972 2064308 pod_ready.go:82] duration metric: took 339.581954ms for pod "kube-scheduler-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:04.104999 2064308 pod_ready.go:79] waiting up to 6m0s for pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:04.295860 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:04.598910 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:04.602815 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:04.795954 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:05.100224 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:05.101534 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:05.296166 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:05.599439 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:05.601426 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:05.795442 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:06.102393 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:06.103036 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:06.122130 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:06.299045 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:06.599932 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:06.601206 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:06.814263 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:07.096848 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:07.101223 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:07.295217 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:07.599444 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:07.600431 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:07.795082 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:08.101892 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:08.102976 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:08.296014 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:08.598395 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:08.598643 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:08.613020 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:08.795739 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:09.103941 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:09.104967 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:09.295694 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:09.599659 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:09.601180 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:09.796354 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:10.098446 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:10.099577 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:10.295198 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:10.597281 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:10.599286 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:10.616287 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:10.795720 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:11.097801 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:11.099342 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:11.297048 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:11.599247 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:11.599974 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:11.794513 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:12.097432 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:12.099058 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:12.295097 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:12.598578 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:12.599897 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:12.796822 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:13.096898 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:13.098940 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:13.112547 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:13.295802 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:13.599642 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:13.600761 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:13.794583 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:14.096452 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:14.098429 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:14.297517 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:14.598010 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:14.599983 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:14.795140 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:15.104125 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:15.104975 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:15.113778 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:15.295679 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:15.598018 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:15.598555 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:15.795791 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:16.096811 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:16.099236 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:16.296057 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:16.597945 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:16.599646 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:16.797262 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:17.098985 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:17.099689 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:17.295469 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:17.599269 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:17.600683 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:17.611951 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:17.794427 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:18.099862 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:18.101710 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:18.296191 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:18.596772 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:18.600049 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:18.811403 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:19.098130 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:19.099143 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:19.111509 2064308 pod_ready.go:93] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:19.111570 2064308 pod_ready.go:82] duration metric: took 15.006549742s for pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:19.111587 2064308 pod_ready.go:39] duration metric: took 25.927112572s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:31:19.111604 2064308 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:31:19.111670 2064308 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:31:19.162518 2064308 api_server.go:72] duration metric: took 28.954985289s to wait for apiserver process to appear ...
	I0916 10:31:19.162546 2064308 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:31:19.162572 2064308 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:31:19.179642 2064308 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:31:19.180628 2064308 api_server.go:141] control plane version: v1.31.1
	I0916 10:31:19.180658 2064308 api_server.go:131] duration metric: took 18.103285ms to wait for apiserver health ...
	I0916 10:31:19.180668 2064308 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:31:19.201200 2064308 system_pods.go:59] 18 kube-system pods found
	I0916 10:31:19.201280 2064308 system_pods.go:61] "coredns-7c65d6cfc9-jqthn" [bc44b698-a863-4ab8-85d8-5bd54d141bd3] Running
	I0916 10:31:19.201299 2064308 system_pods.go:61] "csi-hostpath-attacher-0" [b2e317f7-5d09-4cd6-823d-cc849d3bf9e2] Running
	I0916 10:31:19.201316 2064308 system_pods.go:61] "csi-hostpath-resizer-0" [1d31c631-4247-4ffe-8a87-9b11803bc389] Running
	I0916 10:31:19.201350 2064308 system_pods.go:61] "csi-hostpathplugin-r28vj" [ad580f65-4a4a-445c-bec7-b518245397ea] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0916 10:31:19.201372 2064308 system_pods.go:61] "etcd-addons-451841" [1a9d168e-83c3-4fc0-b188-1da688352b51] Running
	I0916 10:31:19.201392 2064308 system_pods.go:61] "kindnet-zckxr" [b831bf61-6a35-4681-afe4-f5a0f875a7b5] Running
	I0916 10:31:19.201409 2064308 system_pods.go:61] "kube-apiserver-addons-451841" [db38aff8-23ff-466f-a1b0-ab5f2041183c] Running
	I0916 10:31:19.201425 2064308 system_pods.go:61] "kube-controller-manager-addons-451841" [53d22d78-137d-4306-b8df-d9a55061c9df] Running
	I0916 10:31:19.201458 2064308 system_pods.go:61] "kube-ingress-dns-minikube" [c77fac29-39b0-4e39-94f6-a72ac395b130] Running
	I0916 10:31:19.201483 2064308 system_pods.go:61] "kube-proxy-tltkn" [073e7c9c-1ec1-45db-9603-68862d546266] Running
	I0916 10:31:19.201501 2064308 system_pods.go:61] "kube-scheduler-addons-451841" [5b953c55-7b45-4221-89bf-177d1a4efd05] Running
	I0916 10:31:19.201520 2064308 system_pods.go:61] "metrics-server-84c5f94fbc-q47pm" [2252baaa-acbc-43dd-b38e-e8cd59ac7825] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0916 10:31:19.201537 2064308 system_pods.go:61] "nvidia-device-plugin-daemonset-l6r5c" [9645617a-ab29-4c4e-a4ec-4ea217ffb5fb] Running
	I0916 10:31:19.201566 2064308 system_pods.go:61] "registry-66c9cd494c-l957b" [17af08bf-9965-4bec-8d1b-0c4c37167ac1] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
	I0916 10:31:19.201591 2064308 system_pods.go:61] "registry-proxy-9cpxl" [10361d7a-9abb-41e6-88eb-2194d01e1301] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0916 10:31:19.201613 2064308 system_pods.go:61] "snapshot-controller-56fcc65765-6llf9" [7febb7b8-15ce-499d-83f7-b82e03e35902] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.201634 2064308 system_pods.go:61] "snapshot-controller-56fcc65765-qxvll" [0a01a9d7-31e0-4965-baba-078f44b17fac] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.201663 2064308 system_pods.go:61] "storage-provisioner" [6a86a07f-e25d-4ac9-9c07-dc9ae0048083] Running
	I0916 10:31:19.201686 2064308 system_pods.go:74] duration metric: took 21.010389ms to wait for pod list to return data ...
	I0916 10:31:19.201707 2064308 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:31:19.204845 2064308 default_sa.go:45] found service account: "default"
	I0916 10:31:19.204868 2064308 default_sa.go:55] duration metric: took 3.144001ms for default service account to be created ...
	I0916 10:31:19.204877 2064308 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:31:19.219489 2064308 system_pods.go:86] 18 kube-system pods found
	I0916 10:31:19.219563 2064308 system_pods.go:89] "coredns-7c65d6cfc9-jqthn" [bc44b698-a863-4ab8-85d8-5bd54d141bd3] Running
	I0916 10:31:19.219586 2064308 system_pods.go:89] "csi-hostpath-attacher-0" [b2e317f7-5d09-4cd6-823d-cc849d3bf9e2] Running
	I0916 10:31:19.219605 2064308 system_pods.go:89] "csi-hostpath-resizer-0" [1d31c631-4247-4ffe-8a87-9b11803bc389] Running
	I0916 10:31:19.219640 2064308 system_pods.go:89] "csi-hostpathplugin-r28vj" [ad580f65-4a4a-445c-bec7-b518245397ea] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0916 10:31:19.219669 2064308 system_pods.go:89] "etcd-addons-451841" [1a9d168e-83c3-4fc0-b188-1da688352b51] Running
	I0916 10:31:19.219690 2064308 system_pods.go:89] "kindnet-zckxr" [b831bf61-6a35-4681-afe4-f5a0f875a7b5] Running
	I0916 10:31:19.219710 2064308 system_pods.go:89] "kube-apiserver-addons-451841" [db38aff8-23ff-466f-a1b0-ab5f2041183c] Running
	I0916 10:31:19.219728 2064308 system_pods.go:89] "kube-controller-manager-addons-451841" [53d22d78-137d-4306-b8df-d9a55061c9df] Running
	I0916 10:31:19.219766 2064308 system_pods.go:89] "kube-ingress-dns-minikube" [c77fac29-39b0-4e39-94f6-a72ac395b130] Running
	I0916 10:31:19.219784 2064308 system_pods.go:89] "kube-proxy-tltkn" [073e7c9c-1ec1-45db-9603-68862d546266] Running
	I0916 10:31:19.219799 2064308 system_pods.go:89] "kube-scheduler-addons-451841" [5b953c55-7b45-4221-89bf-177d1a4efd05] Running
	I0916 10:31:19.219819 2064308 system_pods.go:89] "metrics-server-84c5f94fbc-q47pm" [2252baaa-acbc-43dd-b38e-e8cd59ac7825] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0916 10:31:19.219847 2064308 system_pods.go:89] "nvidia-device-plugin-daemonset-l6r5c" [9645617a-ab29-4c4e-a4ec-4ea217ffb5fb] Running
	I0916 10:31:19.219875 2064308 system_pods.go:89] "registry-66c9cd494c-l957b" [17af08bf-9965-4bec-8d1b-0c4c37167ac1] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
	I0916 10:31:19.219896 2064308 system_pods.go:89] "registry-proxy-9cpxl" [10361d7a-9abb-41e6-88eb-2194d01e1301] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0916 10:31:19.219915 2064308 system_pods.go:89] "snapshot-controller-56fcc65765-6llf9" [7febb7b8-15ce-499d-83f7-b82e03e35902] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.219935 2064308 system_pods.go:89] "snapshot-controller-56fcc65765-qxvll" [0a01a9d7-31e0-4965-baba-078f44b17fac] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.219968 2064308 system_pods.go:89] "storage-provisioner" [6a86a07f-e25d-4ac9-9c07-dc9ae0048083] Running
	I0916 10:31:19.219989 2064308 system_pods.go:126] duration metric: took 15.104177ms to wait for k8s-apps to be running ...
	I0916 10:31:19.220008 2064308 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:31:19.220090 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:31:19.263162 2064308 system_svc.go:56] duration metric: took 43.144676ms WaitForService to wait for kubelet
	I0916 10:31:19.263243 2064308 kubeadm.go:582] duration metric: took 29.055714708s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:31:19.263279 2064308 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:31:19.272478 2064308 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:31:19.272561 2064308 node_conditions.go:123] node cpu capacity is 2
	I0916 10:31:19.272591 2064308 node_conditions.go:105] duration metric: took 9.29091ms to run NodePressure ...
	I0916 10:31:19.272616 2064308 start.go:241] waiting for startup goroutines ...
	I0916 10:31:19.305039 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:19.605207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:19.605801 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:19.797193 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:20.099691 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:20.101048 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:20.295291 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:20.597682 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:20.598569 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:20.797887 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:21.096766 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:21.099258 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:21.294755 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:21.597973 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:21.600238 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:21.803444 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:22.097870 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:22.100851 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:22.295006 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:22.597700 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:22.598742 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:22.795839 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:23.096175 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:23.098155 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:23.294814 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:23.596166 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:23.598634 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:23.795172 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:24.096643 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:24.099715 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:24.297255 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:24.598721 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:24.599933 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:24.795260 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:25.098369 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:25.101032 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:25.295093 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:25.597734 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:25.597966 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:25.795323 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:26.096041 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:26.099677 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:26.295063 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:26.597593 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:26.599159 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:26.795825 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:27.098811 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:27.099453 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:27.295012 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:27.597182 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:27.601645 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:27.795056 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:28.128064 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:28.129640 2064308 kapi.go:107] duration metric: took 27.037023988s to wait for kubernetes.io/minikube-addons=registry ...
	I0916 10:31:28.325425 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:28.598623 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:28.795615 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:29.104511 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:29.295646 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:29.598962 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:29.795067 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:30.099851 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:30.296647 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:30.598332 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:30.796058 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:31.099992 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:31.294874 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:31.598117 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:31.796531 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:32.098393 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:32.295287 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:32.598055 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:32.795217 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:33.099311 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:33.295339 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:33.598188 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:33.795029 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:34.098345 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:34.295712 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:34.598442 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:34.795386 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:35.098874 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:35.295415 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:35.598136 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:35.795586 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:36.098658 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:36.294379 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:36.598764 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:36.795529 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:37.098523 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:37.296711 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:37.601252 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:37.799472 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:38.100971 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:38.298686 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:38.599535 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:38.795481 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:39.098734 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:39.296827 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:39.611876 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:39.851830 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:40.108718 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:40.295843 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:40.599050 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:40.795575 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:41.098568 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:41.295957 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:41.598039 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:41.796038 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:42.099484 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:42.295707 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:42.598887 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:42.795416 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:43.099107 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:43.295766 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:43.599999 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:43.795242 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:44.098395 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:44.295957 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:44.600054 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:44.794470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:45.100863 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:45.295685 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:45.599065 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:45.798514 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:46.099116 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:46.296057 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:46.599389 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:46.796585 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:47.099083 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:47.296145 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:47.598490 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:47.797079 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:48.100448 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:48.295294 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:48.598227 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:48.794662 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:49.119185 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:49.295351 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:49.598797 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:49.794962 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:50.098374 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:50.295501 2064308 kapi.go:107] duration metric: took 48.505612662s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
	I0916 10:31:50.598550 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:51.098277 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:51.598976 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:52.098206 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:52.597960 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:53.098585 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:53.598884 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:54.098582 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:54.598852 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:55.098478 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:55.598212 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:56.098517 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:56.598412 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:57.098499 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:57.598710 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:58.097637 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:58.599134 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:59.098778 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:59.598318 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:00.130067 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:00.598955 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:01.098901 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:01.598465 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:02.098925 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:02.598148 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:03.102570 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:03.598295 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:04.099028 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:04.598994 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:05.100186 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:05.598454 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:06.098931 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:06.598336 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:07.098800 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:07.599302 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:08.098401 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:08.599190 2064308 kapi.go:107] duration metric: took 1m7.505513413s to wait for app.kubernetes.io/name=ingress-nginx ...
	I0916 10:32:25.388811 2064308 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:32:25.388836 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:25.888825 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:26.388022 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:26.887847 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:27.387834 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:27.888795 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:28.387767 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:28.887542 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:29.388486 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:29.888784 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:30.387676 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:30.888490 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:31.388236 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:31.888242 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:32.387732 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:32.888206 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:33.387868 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:33.887962 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:34.387683 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:34.889279 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:35.388145 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:35.887555 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:36.389045 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:36.887848 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:37.388742 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:37.888016 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:38.388211 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:38.887716 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:39.388708 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:39.888575 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:40.388841 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:40.888385 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:41.388668 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:41.887792 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:42.388021 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:42.888125 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:43.388320 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:43.887796 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:44.388101 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:44.888791 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:45.391207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:45.888190 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:46.387869 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:46.887554 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:47.388470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:47.888177 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:48.387903 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:48.888232 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:49.388449 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:49.888527 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:50.388650 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:50.888495 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:51.388590 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:51.888197 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:52.387563 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:52.888238 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:53.388322 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:53.887557 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:54.388664 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:54.888836 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:55.388171 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:55.888180 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:56.388322 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:56.888567 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:57.388117 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:57.887422 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:58.388230 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:58.887872 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:59.396878 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:59.888550 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:00.394252 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:00.887612 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:01.392523 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:01.888091 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:02.393207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:02.887610 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:03.388745 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:03.888344 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:04.388999 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:04.889012 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:05.390448 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:05.888198 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:06.395413 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:06.889275 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:07.387879 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:07.888183 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:08.388311 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:08.888612 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:09.388334 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:09.887931 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:10.387765 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:10.888317 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:11.388557 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:11.887439 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:12.388213 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:12.887810 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:13.388135 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:13.888239 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:14.388445 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:14.889102 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:15.388533 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:15.887383 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:16.388426 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:16.888022 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:17.388399 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:17.887327 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:18.388016 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:18.887470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:19.388533 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:19.889124 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:20.387631 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:20.888484 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:21.388124 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:21.887946 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:22.388268 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:22.887332 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:23.388224 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:23.887844 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:24.387744 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:24.888405 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:25.388231 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:25.888672 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:26.388063 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:26.888126 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:27.387865 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:27.887552 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:28.387806 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:28.887772 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:29.388587 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:29.888551 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:30.387903 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:30.887507 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:31.388609 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:31.888449 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:32.388259 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:32.887834 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:33.388141 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:33.888934 2064308 kapi.go:107] duration metric: took 2m30.504634261s to wait for kubernetes.io/minikube-addons=gcp-auth ...
	I0916 10:33:33.890859 2064308 out.go:177] * Your GCP credentials will now be mounted into every pod created in the addons-451841 cluster.
	I0916 10:33:33.892432 2064308 out.go:177] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
	I0916 10:33:33.893920 2064308 out.go:177] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
	I0916 10:33:33.895584 2064308 out.go:177] * Enabled addons: nvidia-device-plugin, storage-provisioner, ingress-dns, storage-provisioner-rancher, volcano, cloud-spanner, metrics-server, inspektor-gadget, yakd, default-storageclass, volumesnapshots, registry, csi-hostpath-driver, ingress, gcp-auth
	I0916 10:33:33.897279 2064308 addons.go:510] duration metric: took 2m43.689318504s for enable addons: enabled=[nvidia-device-plugin storage-provisioner ingress-dns storage-provisioner-rancher volcano cloud-spanner metrics-server inspektor-gadget yakd default-storageclass volumesnapshots registry csi-hostpath-driver ingress gcp-auth]
	I0916 10:33:33.897342 2064308 start.go:246] waiting for cluster config update ...
	I0916 10:33:33.897367 2064308 start.go:255] writing updated cluster config ...
	I0916 10:33:33.898186 2064308 ssh_runner.go:195] Run: rm -f paused
	I0916 10:33:33.906793 2064308 out.go:177] * Done! kubectl is now configured to use "addons-451841" cluster and "default" namespace by default
	E0916 10:33:33.908425 2064308 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED              STATE               NAME                                     ATTEMPT             POD ID              POD
	4bf6f7c832fdd       4f725bf50aaa5       About a minute ago   Exited              gadget                                   6                   11fc6773b3e1a       gadget-wjwc2
	f90c8869604c5       6ef582f3ec844       4 minutes ago        Running             gcp-auth                                 0                   9038b6b53facd       gcp-auth-89d5ffd79-pw58v
	6f68aecec6aa2       8b46b1cd48760       6 minutes ago        Running             admission                                0                   e3af2951f3794       volcano-admission-77d7d48b68-sjxcs
	dd63136d8d6ac       289a818c8d9c5       6 minutes ago        Running             controller                               0                   b4699f942aa64       ingress-nginx-controller-bc57996ff-rqhcp
	a490639f0e8aa       ee6d597e62dc8       6 minutes ago        Running             csi-snapshotter                          0                   8260f57befdda       csi-hostpathplugin-r28vj
	317ce7462f733       642ded511e141       6 minutes ago        Running             csi-provisioner                          0                   8260f57befdda       csi-hostpathplugin-r28vj
	324d1501e94dc       420193b27261a       6 minutes ago        Exited              patch                                    2                   13d0568de4b9d       ingress-nginx-admission-patch-z2qc7
	7b9cc7b5195ab       922312104da8a       6 minutes ago        Running             liveness-probe                           0                   8260f57befdda       csi-hostpathplugin-r28vj
	bee97d004dc4d       08f6b2990811a       6 minutes ago        Running             hostpath                                 0                   8260f57befdda       csi-hostpathplugin-r28vj
	19ec6d5fbc0fd       8b46b1cd48760       6 minutes ago        Exited              main                                     0                   009b63594d8b2       volcano-admission-init-bz266
	000463bf50714       d9c7ad4c226bf       6 minutes ago        Running             volcano-scheduler                        0                   d9214c0e709d4       volcano-scheduler-576bc46687-xwjbn
	4dc42ec686d73       1505f556b3a7b       6 minutes ago        Running             volcano-controllers                      0                   343744c6dcf07       volcano-controllers-56675bb4d5-2ltwp
	30f0f6b13e6d5       420193b27261a       6 minutes ago        Exited              create                                   0                   993d0544f3868       ingress-nginx-admission-create-4vr4g
	32df8554c702e       4d1e5c3e97420       6 minutes ago        Running             volume-snapshot-controller               0                   864cff1eb40c5       snapshot-controller-56fcc65765-6llf9
	f28ea158892d3       5548a49bb60ba       6 minutes ago        Running             metrics-server                           0                   f49e908ac9969       metrics-server-84c5f94fbc-q47pm
	7558a63005c7b       0107d56dbc0be       6 minutes ago        Running             node-driver-registrar                    0                   8260f57befdda       csi-hostpathplugin-r28vj
	d181a00ffae8d       4d1e5c3e97420       6 minutes ago        Running             volume-snapshot-controller               0                   5f5c44341cf11       snapshot-controller-56fcc65765-qxvll
	952cd1e0c1c93       77bdba588b953       6 minutes ago        Running             yakd                                     0                   0b98f618250c0       yakd-dashboard-67d98fc6b-djh62
	6b271689ecd4e       7ce2150c8929b       6 minutes ago        Running             local-path-provisioner                   0                   de8add92893e8       local-path-provisioner-86d989889c-qkpm6
	45dcb038c40ff       3410e1561990a       7 minutes ago        Running             registry-proxy                           0                   7b57eb1530381       registry-proxy-9cpxl
	bd1a124f1e30d       8be4bcf8ec607       7 minutes ago        Running             cloud-spanner-emulator                   0                   14a43ef4433bb       cloud-spanner-emulator-769b77f747-m5wld
	d221c581079ff       c9cf76bb104e1       7 minutes ago        Running             registry                                 0                   82bcc82f48993       registry-66c9cd494c-l957b
	3f681021aea3a       a9bac31a5be8d       7 minutes ago        Running             nvidia-device-plugin-ctr                 0                   2ef7d8ec9c04f       nvidia-device-plugin-daemonset-l6r5c
	98b48c685a09e       487fa743e1e22       7 minutes ago        Running             csi-resizer                              0                   89cb8ade3231b       csi-hostpath-resizer-0
	2472144c5bc6d       1461903ec4fe9       7 minutes ago        Running             csi-external-health-monitor-controller   0                   8260f57befdda       csi-hostpathplugin-r28vj
	0af6491cd95ee       9a80d518f102c       7 minutes ago        Running             csi-attacher                             0                   0d39436266817       csi-hostpath-attacher-0
	9b811a5c5e80c       35508c2f890c4       7 minutes ago        Running             minikube-ingress-dns                     0                   e1ed027bac8d8       kube-ingress-dns-minikube
	5232ad6b096cb       2f6c962e7b831       7 minutes ago        Running             coredns                                  0                   3ad39eb105298       coredns-7c65d6cfc9-jqthn
	4ddb5fa614111       ba04bb24b9575       7 minutes ago        Running             storage-provisioner                      0                   d0cffc65c18c1       storage-provisioner
	64b671b165f6f       6a23fa8fd2b78       7 minutes ago        Running             kindnet-cni                              0                   bd9ef3e1818e4       kindnet-zckxr
	35987f39fe9ef       24a140c548c07       7 minutes ago        Running             kube-proxy                               0                   6a8ebbdde94be       kube-proxy-tltkn
	8769c148a0bb3       27e3830e14027       7 minutes ago        Running             etcd                                     0                   290d52892953c       etcd-addons-451841
	31da3c8e5867c       279f381cb3736       7 minutes ago        Running             kube-controller-manager                  0                   349d5195292e8       kube-controller-manager-addons-451841
	808425f96a229       7f8aa378bb47d       7 minutes ago        Running             kube-scheduler                           0                   50415da17c7f0       kube-scheduler-addons-451841
	2870b9699fd97       d3f53a98c0a9d       7 minutes ago        Running             kube-apiserver                           0                   1d8868dd2cf0d       kube-apiserver-addons-451841
	
	
	==> containerd <==
	Sep 16 10:34:32 addons-451841 containerd[816]: time="2024-09-16T10:34:32.673424857Z" level=info msg="CreateContainer within sandbox \"11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257\" for &ContainerMetadata{Name:gadget,Attempt:5,} returns container id \"8822f01acad4dbe601571b1f9f74d6e699cd5dc6b81ca2296af8a81db5145e42\""
	Sep 16 10:34:32 addons-451841 containerd[816]: time="2024-09-16T10:34:32.674716901Z" level=info msg="StartContainer for \"8822f01acad4dbe601571b1f9f74d6e699cd5dc6b81ca2296af8a81db5145e42\""
	Sep 16 10:34:32 addons-451841 containerd[816]: time="2024-09-16T10:34:32.736227159Z" level=info msg="StartContainer for \"8822f01acad4dbe601571b1f9f74d6e699cd5dc6b81ca2296af8a81db5145e42\" returns successfully"
	Sep 16 10:34:34 addons-451841 containerd[816]: time="2024-09-16T10:34:34.318513195Z" level=info msg="shim disconnected" id=8822f01acad4dbe601571b1f9f74d6e699cd5dc6b81ca2296af8a81db5145e42 namespace=k8s.io
	Sep 16 10:34:34 addons-451841 containerd[816]: time="2024-09-16T10:34:34.318584473Z" level=warning msg="cleaning up after shim disconnected" id=8822f01acad4dbe601571b1f9f74d6e699cd5dc6b81ca2296af8a81db5145e42 namespace=k8s.io
	Sep 16 10:34:34 addons-451841 containerd[816]: time="2024-09-16T10:34:34.318594795Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:34:34 addons-451841 containerd[816]: time="2024-09-16T10:34:34.649310701Z" level=info msg="RemoveContainer for \"e5775d74a82fdc47a2354d7b955aa86e41f6723d746a84e61e2e68a2a790a79e\""
	Sep 16 10:34:34 addons-451841 containerd[816]: time="2024-09-16T10:34:34.657646733Z" level=info msg="RemoveContainer for \"e5775d74a82fdc47a2354d7b955aa86e41f6723d746a84e61e2e68a2a790a79e\" returns successfully"
	Sep 16 10:37:22 addons-451841 containerd[816]: time="2024-09-16T10:37:22.524284864Z" level=info msg="PullImage \"ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\""
	Sep 16 10:37:22 addons-451841 containerd[816]: time="2024-09-16T10:37:22.659443362Z" level=info msg="ImageUpdate event name:\"ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 10:37:22 addons-451841 containerd[816]: time="2024-09-16T10:37:22.665370469Z" level=info msg="stop pulling image ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec: active requests=0, bytes read=89"
	Sep 16 10:37:22 addons-451841 containerd[816]: time="2024-09-16T10:37:22.669341831Z" level=info msg="Pulled image \"ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\" with image id \"sha256:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd\", repo tag \"\", repo digest \"ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\", size \"72524105\" in 145.00378ms"
	Sep 16 10:37:22 addons-451841 containerd[816]: time="2024-09-16T10:37:22.669392325Z" level=info msg="PullImage \"ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\" returns image reference \"sha256:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd\""
	Sep 16 10:37:22 addons-451841 containerd[816]: time="2024-09-16T10:37:22.671552211Z" level=info msg="CreateContainer within sandbox \"11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257\" for container &ContainerMetadata{Name:gadget,Attempt:6,}"
	Sep 16 10:37:22 addons-451841 containerd[816]: time="2024-09-16T10:37:22.685888033Z" level=info msg="CreateContainer within sandbox \"11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257\" for &ContainerMetadata{Name:gadget,Attempt:6,} returns container id \"4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c\""
	Sep 16 10:37:22 addons-451841 containerd[816]: time="2024-09-16T10:37:22.686628639Z" level=info msg="StartContainer for \"4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c\""
	Sep 16 10:37:22 addons-451841 containerd[816]: time="2024-09-16T10:37:22.740553568Z" level=info msg="StartContainer for \"4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c\" returns successfully"
	Sep 16 10:37:24 addons-451841 containerd[816]: time="2024-09-16T10:37:24.112911597Z" level=error msg="ExecSync for \"4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c\" failed" error="failed to exec in container: failed to start exec \"68720ecdec7f6c8436d7b5f656706d95508d45407dc91a0b1d915fa8280f80fc\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Sep 16 10:37:24 addons-451841 containerd[816]: time="2024-09-16T10:37:24.125095589Z" level=error msg="ExecSync for \"4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c\" failed" error="failed to exec in container: failed to start exec \"4a3d2129e274b4e1a6cfdbc8a08b298ac9d7c46355239fdfb38fda19484413bc\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Sep 16 10:37:24 addons-451841 containerd[816]: time="2024-09-16T10:37:24.140369800Z" level=error msg="ExecSync for \"4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c\" failed" error="failed to exec in container: failed to start exec \"4765972b973dad3bba47c31475ef7a2e59a1e103aa8bf884db720d0c2b753515\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Sep 16 10:37:24 addons-451841 containerd[816]: time="2024-09-16T10:37:24.282983164Z" level=info msg="shim disconnected" id=4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c namespace=k8s.io
	Sep 16 10:37:24 addons-451841 containerd[816]: time="2024-09-16T10:37:24.283044924Z" level=warning msg="cleaning up after shim disconnected" id=4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c namespace=k8s.io
	Sep 16 10:37:24 addons-451841 containerd[816]: time="2024-09-16T10:37:24.283055664Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:37:25 addons-451841 containerd[816]: time="2024-09-16T10:37:25.102835481Z" level=info msg="RemoveContainer for \"8822f01acad4dbe601571b1f9f74d6e699cd5dc6b81ca2296af8a81db5145e42\""
	Sep 16 10:37:25 addons-451841 containerd[816]: time="2024-09-16T10:37:25.110197384Z" level=info msg="RemoveContainer for \"8822f01acad4dbe601571b1f9f74d6e699cd5dc6b81ca2296af8a81db5145e42\" returns successfully"
	
	
	==> coredns [5232ad6b096cb39cf18a9c11e936d3dae11b081bd6666741f3c42e78161ed09f] <==
	[INFO] 10.244.0.9:45725 - 37874 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000066576s
	[INFO] 10.244.0.9:59523 - 16440 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002202223s
	[INFO] 10.244.0.9:59523 - 22330 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002459412s
	[INFO] 10.244.0.9:50469 - 36811 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000147232s
	[INFO] 10.244.0.9:50469 - 6599 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000154395s
	[INFO] 10.244.0.9:54670 - 20364 "AAAA IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000113739s
	[INFO] 10.244.0.9:54670 - 51376 "A IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000392596s
	[INFO] 10.244.0.9:37135 - 16205 "A IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000064189s
	[INFO] 10.244.0.9:37135 - 64832 "AAAA IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.00005714s
	[INFO] 10.244.0.9:54223 - 7962 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000052168s
	[INFO] 10.244.0.9:54223 - 14360 "AAAA IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000048632s
	[INFO] 10.244.0.9:33840 - 38805 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001404552s
	[INFO] 10.244.0.9:33840 - 4752 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.00164702s
	[INFO] 10.244.0.9:45027 - 58736 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000075766s
	[INFO] 10.244.0.9:45027 - 39026 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000213093s
	[INFO] 10.244.0.24:51483 - 10090 "AAAA IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.000202231s
	[INFO] 10.244.0.24:42195 - 64926 "A IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.000154649s
	[INFO] 10.244.0.24:32892 - 59527 "AAAA IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000163215s
	[INFO] 10.244.0.24:47611 - 11902 "A IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000096418s
	[INFO] 10.244.0.24:59950 - 37722 "AAAA IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.00008466s
	[INFO] 10.244.0.24:52002 - 29131 "A IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.000089403s
	[INFO] 10.244.0.24:38598 - 65011 "AAAA IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.002266241s
	[INFO] 10.244.0.24:60458 - 11928 "A IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.001809759s
	[INFO] 10.244.0.24:43975 - 30277 "A IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 610 0.001862182s
	[INFO] 10.244.0.24:51154 - 58482 "AAAA IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 240 0.002138832s
	
	
	==> describe nodes <==
	Name:               addons-451841
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=addons-451841
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=addons-451841
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_30_46_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	                    topology.hostpath.csi/node=addons-451841
	Annotations:        csi.volume.kubernetes.io/nodeid: {"hostpath.csi.k8s.io":"addons-451841"}
	                    kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:30:42 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  addons-451841
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:38:25 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:33:49 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:33:49 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:33:49 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:33:49 +0000   Mon, 16 Sep 2024 10:30:42 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    addons-451841
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 003b87cb77e5465aa882d8df5f5cd5ab
	  System UUID:                21a29522-aef6-4d70-a29b-0ea27731fdbe
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (27 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     cloud-spanner-emulator-769b77f747-m5wld     0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m34s
	  gadget                      gadget-wjwc2                                0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m31s
	  gcp-auth                    gcp-auth-89d5ffd79-pw58v                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         6m2s
	  ingress-nginx               ingress-nginx-controller-bc57996ff-rqhcp    100m (5%)     0 (0%)      90Mi (1%)        0 (0%)         7m29s
	  kube-system                 coredns-7c65d6cfc9-jqthn                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     7m37s
	  kube-system                 csi-hostpath-attacher-0                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m26s
	  kube-system                 csi-hostpath-resizer-0                      0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m26s
	  kube-system                 csi-hostpathplugin-r28vj                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m26s
	  kube-system                 etcd-addons-451841                          100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         7m42s
	  kube-system                 kindnet-zckxr                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      7m38s
	  kube-system                 kube-apiserver-addons-451841                250m (12%)    0 (0%)      0 (0%)           0 (0%)         7m42s
	  kube-system                 kube-controller-manager-addons-451841       200m (10%)    0 (0%)      0 (0%)           0 (0%)         7m43s
	  kube-system                 kube-ingress-dns-minikube                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m33s
	  kube-system                 kube-proxy-tltkn                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m38s
	  kube-system                 kube-scheduler-addons-451841                100m (5%)     0 (0%)      0 (0%)           0 (0%)         7m42s
	  kube-system                 metrics-server-84c5f94fbc-q47pm             100m (5%)     0 (0%)      200Mi (2%)       0 (0%)         7m32s
	  kube-system                 nvidia-device-plugin-daemonset-l6r5c        0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m35s
	  kube-system                 registry-66c9cd494c-l957b                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m33s
	  kube-system                 registry-proxy-9cpxl                        0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m33s
	  kube-system                 snapshot-controller-56fcc65765-6llf9        0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m30s
	  kube-system                 snapshot-controller-56fcc65765-qxvll        0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m30s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m32s
	  local-path-storage          local-path-provisioner-86d989889c-qkpm6     0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m32s
	  volcano-system              volcano-admission-77d7d48b68-sjxcs          0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m28s
	  volcano-system              volcano-controllers-56675bb4d5-2ltwp        0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m27s
	  volcano-system              volcano-scheduler-576bc46687-xwjbn          0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m27s
	  yakd-dashboard              yakd-dashboard-67d98fc6b-djh62              0 (0%)        0 (0%)      128Mi (1%)       256Mi (3%)     7m31s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                1050m (52%)  100m (5%)
	  memory             638Mi (8%)   476Mi (6%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-1Gi      0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	  hugepages-32Mi     0 (0%)       0 (0%)
	  hugepages-64Ki     0 (0%)       0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 7m36s                  kube-proxy       
	  Normal   NodeAllocatableEnforced  7m50s                  kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 7m50s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  7m50s (x8 over 7m50s)  kubelet          Node addons-451841 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    7m50s (x7 over 7m50s)  kubelet          Node addons-451841 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     7m50s (x7 over 7m50s)  kubelet          Node addons-451841 status is now: NodeHasSufficientPID
	  Normal   Starting                 7m50s                  kubelet          Starting kubelet.
	  Normal   Starting                 7m42s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 7m42s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  7m42s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  7m42s                  kubelet          Node addons-451841 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    7m42s                  kubelet          Node addons-451841 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     7m42s                  kubelet          Node addons-451841 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           7m39s                  node-controller  Node addons-451841 event: Registered Node addons-451841 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [8769c148a0bb341cc1dcca117d41b6be795d52ed6e49348d14da26aac1d42f01] <==
	{"level":"info","ts":"2024-09-16T10:30:38.620397Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:30:38.620645Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:30:38.620676Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:30:38.620740Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:30:38.620758Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:30:39.574728Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.574952Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.575045Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.575117Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575193Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575240Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575326Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.581756Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:addons-451841 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:30:39.582006Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:30:39.582134Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:30:39.582418Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:30:39.582525Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:30:39.582434Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.583536Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:30:39.584617Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:30:39.585041Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.590779Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.590980Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.591021Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:30:39.592081Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	
	
	==> gcp-auth [f90c8869604c54edfd93d5ef8e6467ed81e6a63fbedf9c5712f155d5d85f40b8] <==
	2024/09/16 10:33:32 GCP Auth Webhook started!
	
	
	==> kernel <==
	 10:38:27 up 1 day, 14:20,  0 users,  load average: 0.28, 0.88, 1.75
	Linux addons-451841 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [64b671b165f6f7bb28b281ddd3fe708221407f35f09389c964253f52887fd626] <==
	I0916 10:36:21.726448       1 main.go:299] handling current node
	I0916 10:36:31.721874       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:36:31.721910       1 main.go:299] handling current node
	I0916 10:36:41.720953       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:36:41.720987       1 main.go:299] handling current node
	I0916 10:36:51.720937       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:36:51.720972       1 main.go:299] handling current node
	I0916 10:37:01.727834       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:01.727867       1 main.go:299] handling current node
	I0916 10:37:11.729432       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:11.729465       1 main.go:299] handling current node
	I0916 10:37:21.720904       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:21.720938       1 main.go:299] handling current node
	I0916 10:37:31.722487       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:31.722584       1 main.go:299] handling current node
	I0916 10:37:41.720891       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:41.720925       1 main.go:299] handling current node
	I0916 10:37:51.720989       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:51.721216       1 main.go:299] handling current node
	I0916 10:38:01.725211       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:01.725246       1 main.go:299] handling current node
	I0916 10:38:11.726624       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:11.726661       1 main.go:299] handling current node
	I0916 10:38:21.720890       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:21.720920       1 main.go:299] handling current node
	
	
	==> kube-apiserver [2870b9699fd97d290c5750a6361bd1eb6ac986ce8fb7e3f9eb6474155c6b1fa8] <==
	W0916 10:32:01.434298       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:02.466465       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:03.481058       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:04.547641       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:05.602664       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.287206       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:06.287255       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:32:06.288974       1 dispatcher.go:225] Failed calling webhook, failing closed mutatepod.volcano.sh: failed calling webhook "mutatepod.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/pods/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.354223       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:06.354265       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:32:06.355906       1 dispatcher.go:225] Failed calling webhook, failing closed mutatepod.volcano.sh: failed calling webhook "mutatepod.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/pods/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.623074       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:07.661520       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:08.760490       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:09.841622       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:10.917089       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:11.983956       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:13.046405       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:14.089526       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:25.289607       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:25.289652       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:33:06.298609       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:33:06.298665       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:33:06.363205       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:33:06.363253       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	
	
	==> kube-controller-manager [31da3c8e5867c3e2a6f4592fba3d201359a6c0c862a2620157496149c91a3b11] <==
	I0916 10:33:06.315720       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:06.334991       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:06.336091       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:06.351384       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:06.375003       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:06.382571       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:06.391010       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:06.403147       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:07.408707       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:07.423579       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:08.523312       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:08.551171       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:09.531505       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:09.540649       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:09.549169       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:09.560618       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:09.569582       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:09.577256       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:33.504028       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="13.523761ms"
	I0916 10:33:33.504493       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="52.742µs"
	I0916 10:33:39.026367       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:33:39.035647       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:33:39.082659       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:33:39.087612       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:33:49.934314       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-451841"
	
	
	==> kube-proxy [35987f39fe9efffcbcdfe8a1694d2541bd561939f35f2770e06a09f005dcf753] <==
	I0916 10:30:51.148935       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:30:51.266541       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:30:51.266602       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:30:51.307434       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:30:51.307506       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:30:51.310004       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:30:51.310401       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:30:51.310420       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:30:51.312344       1 config.go:199] "Starting service config controller"
	I0916 10:30:51.312371       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:30:51.312398       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:30:51.312403       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:30:51.315085       1 config.go:328] "Starting node config controller"
	I0916 10:30:51.315100       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:30:51.413119       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:30:51.413177       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:30:51.415238       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [808425f96a2291f8e0cf3dfea11339a46bc25f8b4e1f82c29efc8eee8e1d729a] <==
	W0916 10:30:43.815016       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:30:43.815095       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.815504       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.815602       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.815794       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 10:30:43.815882       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.816048       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:30:43.816126       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.816295       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.817022       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817307       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:30:43.817404       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817601       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.817688       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817801       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:30:43.818028       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817989       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:30:43.818395       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.818315       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 10:30:43.818847       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.818381       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 10:30:43.819065       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.819318       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:30:43.819478       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	I0916 10:30:44.999991       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 10:36:49 addons-451841 kubelet[1517]: I0916 10:36:49.522877    1517 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="kube-system/registry-66c9cd494c-l957b" secret="" err="secret \"gcp-auth\" not found"
	Sep 16 10:36:53 addons-451841 kubelet[1517]: I0916 10:36:53.522463    1517 scope.go:117] "RemoveContainer" containerID="8822f01acad4dbe601571b1f9f74d6e699cd5dc6b81ca2296af8a81db5145e42"
	Sep 16 10:36:53 addons-451841 kubelet[1517]: E0916 10:36:53.523202    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:37:07 addons-451841 kubelet[1517]: I0916 10:37:07.524018    1517 scope.go:117] "RemoveContainer" containerID="8822f01acad4dbe601571b1f9f74d6e699cd5dc6b81ca2296af8a81db5145e42"
	Sep 16 10:37:07 addons-451841 kubelet[1517]: E0916 10:37:07.524226    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:37:22 addons-451841 kubelet[1517]: I0916 10:37:22.522875    1517 scope.go:117] "RemoveContainer" containerID="8822f01acad4dbe601571b1f9f74d6e699cd5dc6b81ca2296af8a81db5145e42"
	Sep 16 10:37:24 addons-451841 kubelet[1517]: E0916 10:37:24.114101    1517 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"68720ecdec7f6c8436d7b5f656706d95508d45407dc91a0b1d915fa8280f80fc\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c" cmd=["/bin/gadgettracermanager","-liveness"]
	Sep 16 10:37:24 addons-451841 kubelet[1517]: E0916 10:37:24.125504    1517 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"4a3d2129e274b4e1a6cfdbc8a08b298ac9d7c46355239fdfb38fda19484413bc\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c" cmd=["/bin/gadgettracermanager","-liveness"]
	Sep 16 10:37:24 addons-451841 kubelet[1517]: E0916 10:37:24.140609    1517 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = failed to exec in container: failed to start exec \"4765972b973dad3bba47c31475ef7a2e59a1e103aa8bf884db720d0c2b753515\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown" containerID="4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c" cmd=["/bin/gadgettracermanager","-liveness"]
	Sep 16 10:37:25 addons-451841 kubelet[1517]: I0916 10:37:25.100314    1517 scope.go:117] "RemoveContainer" containerID="8822f01acad4dbe601571b1f9f74d6e699cd5dc6b81ca2296af8a81db5145e42"
	Sep 16 10:37:25 addons-451841 kubelet[1517]: I0916 10:37:25.100795    1517 scope.go:117] "RemoveContainer" containerID="4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c"
	Sep 16 10:37:25 addons-451841 kubelet[1517]: E0916 10:37:25.100975    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:37:27 addons-451841 kubelet[1517]: I0916 10:37:27.112928    1517 scope.go:117] "RemoveContainer" containerID="4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c"
	Sep 16 10:37:27 addons-451841 kubelet[1517]: E0916 10:37:27.113140    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:37:29 addons-451841 kubelet[1517]: I0916 10:37:29.522642    1517 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="kube-system/nvidia-device-plugin-daemonset-l6r5c" secret="" err="secret \"gcp-auth\" not found"
	Sep 16 10:37:38 addons-451841 kubelet[1517]: I0916 10:37:38.522163    1517 scope.go:117] "RemoveContainer" containerID="4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c"
	Sep 16 10:37:38 addons-451841 kubelet[1517]: E0916 10:37:38.522378    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:37:47 addons-451841 kubelet[1517]: I0916 10:37:47.522375    1517 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="kube-system/registry-proxy-9cpxl" secret="" err="secret \"gcp-auth\" not found"
	Sep 16 10:37:52 addons-451841 kubelet[1517]: I0916 10:37:52.522934    1517 scope.go:117] "RemoveContainer" containerID="4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c"
	Sep 16 10:37:52 addons-451841 kubelet[1517]: E0916 10:37:52.523622    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:37:57 addons-451841 kubelet[1517]: I0916 10:37:57.522414    1517 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="kube-system/registry-66c9cd494c-l957b" secret="" err="secret \"gcp-auth\" not found"
	Sep 16 10:38:04 addons-451841 kubelet[1517]: I0916 10:38:04.522389    1517 scope.go:117] "RemoveContainer" containerID="4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c"
	Sep 16 10:38:04 addons-451841 kubelet[1517]: E0916 10:38:04.522608    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:38:16 addons-451841 kubelet[1517]: I0916 10:38:16.522001    1517 scope.go:117] "RemoveContainer" containerID="4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c"
	Sep 16 10:38:16 addons-451841 kubelet[1517]: E0916 10:38:16.522351    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	
	
	==> storage-provisioner [4ddb5fa614111a21d93d580947f3eb3b791d38fa6e497e66ae259ff6bb7fed15] <==
	I0916 10:30:56.265937       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:30:56.289948       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:30:56.290011       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:30:56.319402       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:30:56.319890       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"ef239dcc-ec3a-4a4d-b0db-6d9c8de888a1", APIVersion:"v1", ResourceVersion:"601", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157 became leader
	I0916 10:30:56.319948       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157!
	I0916 10:30:56.520389       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-451841 -n addons-451841
helpers_test.go:261: (dbg) Run:  kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (2.254368ms)
helpers_test.go:263: kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestAddons/serial/Volcano (294.57s)

                                                
                                    
x
+
TestAddons/serial/GCPAuth/Namespaces (0s)

                                                
                                                
=== RUN   TestAddons/serial/GCPAuth/Namespaces
addons_test.go:656: (dbg) Run:  kubectl --context addons-451841 create ns new-namespace
addons_test.go:656: (dbg) Non-zero exit: kubectl --context addons-451841 create ns new-namespace: fork/exec /usr/local/bin/kubectl: exec format error (497.416µs)
addons_test.go:658: kubectl --context addons-451841 create ns new-namespace failed: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestAddons/serial/GCPAuth/Namespaces (0.00s)

                                                
                                    
x
+
TestAddons/parallel/Registry (15.67s)

                                                
                                                
=== RUN   TestAddons/parallel/Registry
=== PAUSE TestAddons/parallel/Registry

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Registry
addons_test.go:332: registry stabilized in 4.287447ms
addons_test.go:334: (dbg) TestAddons/parallel/Registry: waiting 6m0s for pods matching "actual-registry=true" in namespace "kube-system" ...
helpers_test.go:344: "registry-66c9cd494c-l957b" [17af08bf-9965-4bec-8d1b-0c4c37167ac1] Running
addons_test.go:334: (dbg) TestAddons/parallel/Registry: actual-registry=true healthy within 6.004077009s
addons_test.go:337: (dbg) TestAddons/parallel/Registry: waiting 10m0s for pods matching "registry-proxy=true" in namespace "kube-system" ...
helpers_test.go:344: "registry-proxy-9cpxl" [10361d7a-9abb-41e6-88eb-2194d01e1301] Running
addons_test.go:337: (dbg) TestAddons/parallel/Registry: registry-proxy=true healthy within 6.003670779s
addons_test.go:342: (dbg) Run:  kubectl --context addons-451841 delete po -l run=registry-test --now
addons_test.go:342: (dbg) Non-zero exit: kubectl --context addons-451841 delete po -l run=registry-test --now: fork/exec /usr/local/bin/kubectl: exec format error (544.152µs)
addons_test.go:344: pre-cleanup kubectl --context addons-451841 delete po -l run=registry-test --now failed: fork/exec /usr/local/bin/kubectl: exec format error (not a problem)
addons_test.go:347: (dbg) Run:  kubectl --context addons-451841 run --rm registry-test --restart=Never --image=gcr.io/k8s-minikube/busybox -it -- sh -c "wget --spider -S http://registry.kube-system.svc.cluster.local"
addons_test.go:347: (dbg) Non-zero exit: kubectl --context addons-451841 run --rm registry-test --restart=Never --image=gcr.io/k8s-minikube/busybox -it -- sh -c "wget --spider -S http://registry.kube-system.svc.cluster.local": fork/exec /usr/local/bin/kubectl: exec format error (347.321µs)
addons_test.go:349: failed to hit registry.kube-system.svc.cluster.local. args "kubectl --context addons-451841 run --rm registry-test --restart=Never --image=gcr.io/k8s-minikube/busybox -it -- sh -c \"wget --spider -S http://registry.kube-system.svc.cluster.local\"" failed: fork/exec /usr/local/bin/kubectl: exec format error
addons_test.go:353: expected curl response be "HTTP/1.1 200", but got **
addons_test.go:361: (dbg) Run:  out/minikube-linux-arm64 -p addons-451841 ip
2024/09/16 10:38:43 [DEBUG] GET http://192.168.49.2:5000
addons_test.go:390: (dbg) Run:  out/minikube-linux-arm64 -p addons-451841 addons disable registry --alsologtostderr -v=1
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestAddons/parallel/Registry]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect addons-451841
helpers_test.go:235: (dbg) docker inspect addons-451841:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4",
	        "Created": "2024-09-16T10:30:19.386072283Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2064804,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:30:19.514500967Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/hostname",
	        "HostsPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/hosts",
	        "LogPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4-json.log",
	        "Name": "/addons-451841",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "addons-451841:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "addons-451841",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/merged",
	                "UpperDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/diff",
	                "WorkDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "addons-451841",
	                "Source": "/var/lib/docker/volumes/addons-451841/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "addons-451841",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "addons-451841",
	                "name.minikube.sigs.k8s.io": "addons-451841",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "4da7b9dd4db914ae48304dba9ae2b2fb9dab68040bc986bf2751a778e62e4524",
	            "SandboxKey": "/var/run/docker/netns/4da7b9dd4db9",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40577"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40578"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40581"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40579"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40580"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "addons-451841": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "cd1315f485e3267c82ac80908081e901323e720ef1bb26de92d612c54dfd58d8",
	                    "EndpointID": "36f212e2a713c67d6c2ea54e50fbd0d8d7f7eb862ef913caa03a6cbfac71cb21",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "addons-451841",
	                        "8a213d4c4dec"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p addons-451841 -n addons-451841
helpers_test.go:244: <<< TestAddons/parallel/Registry FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestAddons/parallel/Registry]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p addons-451841 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p addons-451841 logs -n 25: (1.813232896s)
helpers_test.go:252: TestAddons/parallel/Registry logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| Command |                 Args                 |        Profile         |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:28 UTC |                     |
	|         | -p download-only-911311              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0         |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:28 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-911311              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | -o=json --download-only              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | -p download-only-889126              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.31.1         |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-889126              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-911311              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-889126              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | --download-only -p                   | download-docker-956530 | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | download-docker-956530               |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | -p download-docker-956530            | download-docker-956530 | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | --download-only -p                   | binary-mirror-852743   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | binary-mirror-852743                 |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --binary-mirror                      |                        |         |         |                     |                     |
	|         | http://127.0.0.1:35351               |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | -p binary-mirror-852743              | binary-mirror-852743   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| addons  | disable dashboard -p                 | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| addons  | enable dashboard -p                  | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| start   | -p addons-451841 --wait=true         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:33 UTC |
	|         | --memory=4000 --alsologtostderr      |                        |         |         |                     |                     |
	|         | --addons=registry                    |                        |         |         |                     |                     |
	|         | --addons=metrics-server              |                        |         |         |                     |                     |
	|         | --addons=volumesnapshots             |                        |         |         |                     |                     |
	|         | --addons=csi-hostpath-driver         |                        |         |         |                     |                     |
	|         | --addons=gcp-auth                    |                        |         |         |                     |                     |
	|         | --addons=cloud-spanner               |                        |         |         |                     |                     |
	|         | --addons=inspektor-gadget            |                        |         |         |                     |                     |
	|         | --addons=storage-provisioner-rancher |                        |         |         |                     |                     |
	|         | --addons=nvidia-device-plugin        |                        |         |         |                     |                     |
	|         | --addons=yakd --addons=volcano       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --addons=ingress                     |                        |         |         |                     |                     |
	|         | --addons=ingress-dns                 |                        |         |         |                     |                     |
	| addons  | addons-451841 addons disable         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | yakd --alsologtostderr -v=1          |                        |         |         |                     |                     |
	| ip      | addons-451841 ip                     | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	| addons  | addons-451841 addons disable         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | registry --alsologtostderr           |                        |         |         |                     |                     |
	|         | -v=1                                 |                        |         |         |                     |                     |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:29:55
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:29:55.756900 2064308 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:29:55.757118 2064308 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:29:55.757146 2064308 out.go:358] Setting ErrFile to fd 2...
	I0916 10:29:55.757164 2064308 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:29:55.757443 2064308 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:29:55.757918 2064308 out.go:352] Setting JSON to false
	I0916 10:29:55.758950 2064308 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":137538,"bootTime":1726345058,"procs":192,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:29:55.759050 2064308 start.go:139] virtualization:  
	I0916 10:29:55.762450 2064308 out.go:177] * [addons-451841] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:29:55.765218 2064308 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:29:55.765320 2064308 notify.go:220] Checking for updates...
	I0916 10:29:55.771607 2064308 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:29:55.774426 2064308 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:29:55.777761 2064308 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:29:55.780330 2064308 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:29:55.782904 2064308 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:29:55.785688 2064308 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:29:55.807382 2064308 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:29:55.807515 2064308 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:29:55.863178 2064308 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:49 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:29:55.853088898 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:29:55.863303 2064308 docker.go:318] overlay module found
	I0916 10:29:55.867792 2064308 out.go:177] * Using the docker driver based on user configuration
	I0916 10:29:55.870461 2064308 start.go:297] selected driver: docker
	I0916 10:29:55.870476 2064308 start.go:901] validating driver "docker" against <nil>
	I0916 10:29:55.870490 2064308 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:29:55.871367 2064308 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:29:55.922454 2064308 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:49 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:29:55.912678011 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:29:55.922666 2064308 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:29:55.922995 2064308 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:29:55.925501 2064308 out.go:177] * Using Docker driver with root privileges
	I0916 10:29:55.928402 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:29:55.928468 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:29:55.928481 2064308 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:29:55.928561 2064308 start.go:340] cluster config:
	{Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime
:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHA
uthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:29:55.931349 2064308 out.go:177] * Starting "addons-451841" primary control-plane node in "addons-451841" cluster
	I0916 10:29:55.933847 2064308 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:29:55.936549 2064308 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:29:55.939027 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:29:55.939075 2064308 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:29:55.939087 2064308 cache.go:56] Caching tarball of preloaded images
	I0916 10:29:55.939127 2064308 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:29:55.939172 2064308 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:29:55.939183 2064308 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:29:55.939554 2064308 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json ...
	I0916 10:29:55.939585 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json: {Name:mk4b86ccd0e04a15f77246bcc432382e6ef83bd3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:29:55.955829 2064308 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:29:55.955957 2064308 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:29:55.955999 2064308 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:29:55.956009 2064308 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:29:55.956017 2064308 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:29:55.956025 2064308 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:30:13.033213 2064308 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:30:13.033255 2064308 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:30:13.033286 2064308 start.go:360] acquireMachinesLock for addons-451841: {Name:mk3e70771a060125a26a792bbbf3ad5672ad97bd Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:30:13.033421 2064308 start.go:364] duration metric: took 111.614µs to acquireMachinesLock for "addons-451841"
	I0916 10:30:13.033454 2064308 start.go:93] Provisioning new machine with config: &{Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:min
ikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:fa
lse CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:30:13.033622 2064308 start.go:125] createHost starting for "" (driver="docker")
	I0916 10:30:13.035916 2064308 out.go:235] * Creating docker container (CPUs=2, Memory=4000MB) ...
	I0916 10:30:13.036188 2064308 start.go:159] libmachine.API.Create for "addons-451841" (driver="docker")
	I0916 10:30:13.036228 2064308 client.go:168] LocalClient.Create starting
	I0916 10:30:13.036363 2064308 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 10:30:13.386329 2064308 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 10:30:13.561829 2064308 cli_runner.go:164] Run: docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 10:30:13.576129 2064308 cli_runner.go:211] docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 10:30:13.576212 2064308 network_create.go:284] running [docker network inspect addons-451841] to gather additional debugging logs...
	I0916 10:30:13.576235 2064308 cli_runner.go:164] Run: docker network inspect addons-451841
	W0916 10:30:13.591552 2064308 cli_runner.go:211] docker network inspect addons-451841 returned with exit code 1
	I0916 10:30:13.591606 2064308 network_create.go:287] error running [docker network inspect addons-451841]: docker network inspect addons-451841: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network addons-451841 not found
	I0916 10:30:13.591621 2064308 network_create.go:289] output of [docker network inspect addons-451841]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network addons-451841 not found
	
	** /stderr **
	I0916 10:30:13.591720 2064308 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:30:13.608306 2064308 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001aeacb0}
	I0916 10:30:13.608356 2064308 network_create.go:124] attempt to create docker network addons-451841 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0916 10:30:13.608420 2064308 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-451841 addons-451841
	I0916 10:30:13.683378 2064308 network_create.go:108] docker network addons-451841 192.168.49.0/24 created
	I0916 10:30:13.683411 2064308 kic.go:121] calculated static IP "192.168.49.2" for the "addons-451841" container
	I0916 10:30:13.683492 2064308 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:30:13.700184 2064308 cli_runner.go:164] Run: docker volume create addons-451841 --label name.minikube.sigs.k8s.io=addons-451841 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:30:13.718068 2064308 oci.go:103] Successfully created a docker volume addons-451841
	I0916 10:30:13.718179 2064308 cli_runner.go:164] Run: docker run --rm --name addons-451841-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --entrypoint /usr/bin/test -v addons-451841:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:30:15.421383 2064308 cli_runner.go:217] Completed: docker run --rm --name addons-451841-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --entrypoint /usr/bin/test -v addons-451841:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib: (1.703150378s)
	I0916 10:30:15.421417 2064308 oci.go:107] Successfully prepared a docker volume addons-451841
	I0916 10:30:15.421439 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:30:15.421458 2064308 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:30:15.421522 2064308 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-451841:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:30:19.320511 2064308 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-451841:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (3.898937148s)
	I0916 10:30:19.320548 2064308 kic.go:203] duration metric: took 3.899086612s to extract preloaded images to volume ...
	W0916 10:30:19.320695 2064308 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:30:19.320803 2064308 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:30:19.371670 2064308 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-451841 --name addons-451841 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-451841 --network addons-451841 --ip 192.168.49.2 --volume addons-451841:/var --security-opt apparmor=unconfined --memory=4000mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:30:19.674459 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Running}}
	I0916 10:30:19.700795 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:19.725169 2064308 cli_runner.go:164] Run: docker exec addons-451841 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:30:19.777409 2064308 oci.go:144] the created container "addons-451841" has a running status.
	I0916 10:30:19.777438 2064308 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa...
	I0916 10:30:20.426549 2064308 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:30:20.459111 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:20.485764 2064308 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:30:20.485788 2064308 kic_runner.go:114] Args: [docker exec --privileged addons-451841 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:30:20.553044 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:20.584488 2064308 machine.go:93] provisionDockerMachine start ...
	I0916 10:30:20.584585 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.604705 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.605002 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.605024 2064308 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:30:20.750295 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-451841
	
	I0916 10:30:20.750323 2064308 ubuntu.go:169] provisioning hostname "addons-451841"
	I0916 10:30:20.750394 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.772671 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.772910 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.772922 2064308 main.go:141] libmachine: About to run SSH command:
	sudo hostname addons-451841 && echo "addons-451841" | sudo tee /etc/hostname
	I0916 10:30:20.923316 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-451841
	
	I0916 10:30:20.923448 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.940021 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.940274 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.940298 2064308 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\saddons-451841' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-451841/g' /etc/hosts;
				else 
					echo '127.0.1.1 addons-451841' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:30:21.087110 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:30:21.087184 2064308 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:30:21.087263 2064308 ubuntu.go:177] setting up certificates
	I0916 10:30:21.087293 2064308 provision.go:84] configureAuth start
	I0916 10:30:21.087450 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.105254 2064308 provision.go:143] copyHostCerts
	I0916 10:30:21.105342 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:30:21.105468 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:30:21.105537 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:30:21.105585 2064308 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.addons-451841 san=[127.0.0.1 192.168.49.2 addons-451841 localhost minikube]
	I0916 10:30:21.497343 2064308 provision.go:177] copyRemoteCerts
	I0916 10:30:21.497413 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:30:21.497456 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.514957 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.611658 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:30:21.636890 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:30:21.662172 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:30:21.686808 2064308 provision.go:87] duration metric: took 599.477164ms to configureAuth
	I0916 10:30:21.686873 2064308 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:30:21.687116 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:21.687133 2064308 machine.go:96] duration metric: took 1.102625588s to provisionDockerMachine
	I0916 10:30:21.687141 2064308 client.go:171] duration metric: took 8.650903893s to LocalClient.Create
	I0916 10:30:21.687161 2064308 start.go:167] duration metric: took 8.650974974s to libmachine.API.Create "addons-451841"
	I0916 10:30:21.687171 2064308 start.go:293] postStartSetup for "addons-451841" (driver="docker")
	I0916 10:30:21.687182 2064308 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:30:21.687249 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:30:21.687299 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.706431 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.804065 2064308 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:30:21.807409 2064308 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:30:21.807450 2064308 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:30:21.807462 2064308 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:30:21.807470 2064308 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:30:21.807482 2064308 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:30:21.807551 2064308 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:30:21.807581 2064308 start.go:296] duration metric: took 120.403063ms for postStartSetup
	I0916 10:30:21.807904 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.824820 2064308 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json ...
	I0916 10:30:21.825120 2064308 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:30:21.825171 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.841557 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.935711 2064308 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:30:21.940289 2064308 start.go:128] duration metric: took 8.906649729s to createHost
	I0916 10:30:21.940328 2064308 start.go:83] releasing machines lock for "addons-451841", held for 8.906892895s
	I0916 10:30:21.940401 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.957512 2064308 ssh_runner.go:195] Run: cat /version.json
	I0916 10:30:21.957582 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.957842 2064308 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:30:21.957901 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.986070 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.992358 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:22.209233 2064308 ssh_runner.go:195] Run: systemctl --version
	I0916 10:30:22.213896 2064308 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:30:22.218111 2064308 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:30:22.243931 2064308 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:30:22.244032 2064308 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:30:22.274074 2064308 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:30:22.274104 2064308 start.go:495] detecting cgroup driver to use...
	I0916 10:30:22.274139 2064308 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:30:22.274194 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:30:22.287113 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:30:22.299302 2064308 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:30:22.299412 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:30:22.313515 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:30:22.327839 2064308 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:30:22.409410 2064308 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:30:22.494962 2064308 docker.go:233] disabling docker service ...
	I0916 10:30:22.495100 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:30:22.515205 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:30:22.527495 2064308 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:30:22.611444 2064308 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:30:22.705471 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:30:22.717496 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:30:22.735435 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:30:22.746124 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:30:22.757226 2064308 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:30:22.757299 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:30:22.767541 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:30:22.779039 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:30:22.788821 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:30:22.799244 2064308 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:30:22.808704 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:30:22.820713 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:30:22.831851 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:30:22.842394 2064308 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:30:22.851545 2064308 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:30:22.860424 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:22.961475 2064308 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:30:23.100987 2064308 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:30:23.101138 2064308 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:30:23.105001 2064308 start.go:563] Will wait 60s for crictl version
	I0916 10:30:23.105079 2064308 ssh_runner.go:195] Run: which crictl
	I0916 10:30:23.108696 2064308 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:30:23.154724 2064308 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:30:23.154812 2064308 ssh_runner.go:195] Run: containerd --version
	I0916 10:30:23.179902 2064308 ssh_runner.go:195] Run: containerd --version
	I0916 10:30:23.208730 2064308 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:30:23.210246 2064308 cli_runner.go:164] Run: docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:30:23.225302 2064308 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:30:23.229071 2064308 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:30:23.240048 2064308 kubeadm.go:883] updating cluster {Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNa
mes:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false Cus
tomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:30:23.240172 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:30:23.240246 2064308 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:30:23.276242 2064308 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:30:23.276266 2064308 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:30:23.276331 2064308 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:30:23.312895 2064308 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:30:23.312924 2064308 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:30:23.312933 2064308 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 containerd true true} ...
	I0916 10:30:23.313028 2064308 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=addons-451841 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:30:23.313095 2064308 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:30:23.348552 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:30:23.348577 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:30:23.348587 2064308 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:30:23.348609 2064308 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-451841 NodeName:addons-451841 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc
/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:30:23.348742 2064308 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "addons-451841"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:30:23.348817 2064308 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:30:23.357634 2064308 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:30:23.357705 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:30:23.366468 2064308 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:30:23.385942 2064308 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:30:23.404422 2064308 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2167 bytes)
	I0916 10:30:23.422831 2064308 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:30:23.426382 2064308 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:30:23.437337 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:23.533359 2064308 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:30:23.547523 2064308 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841 for IP: 192.168.49.2
	I0916 10:30:23.547546 2064308 certs.go:194] generating shared ca certs ...
	I0916 10:30:23.547562 2064308 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:23.548238 2064308 certs.go:240] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:30:24.056004 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt ...
	I0916 10:30:24.056043 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt: {Name:mk8fa0c4ced40ca68ac874100ce374f588dfea0b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.056261 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key ...
	I0916 10:30:24.056276 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key: {Name:mk04aab579c9f6bfd22c8de7442d64e7264cf4f3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.056381 2064308 certs.go:240] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:30:24.923761 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt ...
	I0916 10:30:24.923793 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt: {Name:mke93617c0d085600c816f9e0c290a24fbe662eb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.923996 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key ...
	I0916 10:30:24.924009 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key: {Name:mk45200538cf11f718e98e7cfef8cbfcd0dafedf Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.924099 2064308 certs.go:256] generating profile certs ...
	I0916 10:30:24.924161 2064308 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key
	I0916 10:30:24.924189 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt with IP's: []
	I0916 10:30:25.053524 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt ...
	I0916 10:30:25.053557 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: {Name:mk37fa0b7d204f82c8af039a0f580deae8708ef5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.053750 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key ...
	I0916 10:30:25.053764 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key: {Name:mkdb13343be22c0a0f72ff55f3a3cbca00768e68 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.053853 2064308 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707
	I0916 10:30:25.053877 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2]
	I0916 10:30:25.726904 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 ...
	I0916 10:30:25.726937 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707: {Name:mkf1dd897eefb9f7916ec8408e62b2271e638207 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.727141 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707 ...
	I0916 10:30:25.727156 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707: {Name:mkfbc7b493bc2e7d0b9e7f941111c820f07e3e82 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.727261 2064308 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt
	I0916 10:30:25.727361 2064308 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key
	I0916 10:30:25.727418 2064308 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key
	I0916 10:30:25.727439 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt with IP's: []
	I0916 10:30:26.011801 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt ...
	I0916 10:30:26.011842 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt: {Name:mkb21e9e32e986ac8dbc5fbe6c0db427fdb116ee Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:26.012049 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key ...
	I0916 10:30:26.012065 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key: {Name:mk95b366411d26459b0f1e143cac6384a51d5dfb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:26.012320 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:30:26.012368 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:30:26.012401 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:30:26.012429 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:30:26.013083 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:30:26.039152 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:30:26.064366 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:30:26.093086 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:30:26.116868 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
	I0916 10:30:26.141663 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:30:26.166725 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:30:26.191142 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:30:26.214975 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:30:26.238979 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:30:26.256459 2064308 ssh_runner.go:195] Run: openssl version
	I0916 10:30:26.262089 2064308 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:30:26.271478 2064308 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.274966 2064308 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.275035 2064308 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.281888 2064308 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:30:26.291290 2064308 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:30:26.294471 2064308 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:30:26.294534 2064308 kubeadm.go:392] StartCluster: {Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames
:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false Custom
QemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:30:26.294629 2064308 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:30:26.294715 2064308 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:30:26.332648 2064308 cri.go:89] found id: ""
	I0916 10:30:26.332740 2064308 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:30:26.341585 2064308 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:30:26.350524 2064308 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 10:30:26.350588 2064308 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:30:26.359218 2064308 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 10:30:26.359240 2064308 kubeadm.go:157] found existing configuration files:
	
	I0916 10:30:26.359319 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 10:30:26.368227 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 10:30:26.368297 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 10:30:26.377781 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 10:30:26.386494 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 10:30:26.386567 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 10:30:26.394932 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 10:30:26.403622 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 10:30:26.403687 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:30:26.412005 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 10:30:26.420862 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 10:30:26.420957 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:30:26.429543 2064308 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 10:30:26.471767 2064308 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 10:30:26.472019 2064308 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 10:30:26.498827 2064308 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 10:30:26.498904 2064308 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 10:30:26.498947 2064308 kubeadm.go:310] OS: Linux
	I0916 10:30:26.498998 2064308 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 10:30:26.499052 2064308 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 10:30:26.499103 2064308 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 10:30:26.499154 2064308 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 10:30:26.499218 2064308 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 10:30:26.499270 2064308 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 10:30:26.499320 2064308 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 10:30:26.499375 2064308 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 10:30:26.499426 2064308 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 10:30:26.577650 2064308 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 10:30:26.577762 2064308 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 10:30:26.577859 2064308 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 10:30:26.583045 2064308 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 10:30:26.586527 2064308 out.go:235]   - Generating certificates and keys ...
	I0916 10:30:26.586988 2064308 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 10:30:26.587103 2064308 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 10:30:26.754645 2064308 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 10:30:27.554793 2064308 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 10:30:28.039725 2064308 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 10:30:28.690015 2064308 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 10:30:29.764620 2064308 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 10:30:29.764907 2064308 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [addons-451841 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:30:30.341274 2064308 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 10:30:30.342274 2064308 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [addons-451841 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:30:30.576739 2064308 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 10:30:31.765912 2064308 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 10:30:33.601844 2064308 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 10:30:33.602129 2064308 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 10:30:34.584274 2064308 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 10:30:35.213888 2064308 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 10:30:35.990415 2064308 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 10:30:36.165269 2064308 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 10:30:36.564139 2064308 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 10:30:36.565009 2064308 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 10:30:36.568128 2064308 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 10:30:36.570826 2064308 out.go:235]   - Booting up control plane ...
	I0916 10:30:36.570944 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 10:30:36.571026 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 10:30:36.571834 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 10:30:36.583080 2064308 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 10:30:36.589082 2064308 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 10:30:36.589162 2064308 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 10:30:36.685676 2064308 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 10:30:36.685796 2064308 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 10:30:37.686643 2064308 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00127007s
	I0916 10:30:37.686760 2064308 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 10:30:44.689772 2064308 kubeadm.go:310] [api-check] The API server is healthy after 7.003101119s
	I0916 10:30:44.709044 2064308 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 10:30:44.727931 2064308 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 10:30:44.754458 2064308 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 10:30:44.754737 2064308 kubeadm.go:310] [mark-control-plane] Marking the node addons-451841 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 10:30:44.766739 2064308 kubeadm.go:310] [bootstrap-token] Using token: dx9pov.rexyyitopznv0w4v
	I0916 10:30:44.769416 2064308 out.go:235]   - Configuring RBAC rules ...
	I0916 10:30:44.769548 2064308 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 10:30:44.776785 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 10:30:44.785617 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 10:30:44.789704 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 10:30:44.794016 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 10:30:44.798127 2064308 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 10:30:45.099673 2064308 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 10:30:45.534575 2064308 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 10:30:46.098271 2064308 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 10:30:46.099422 2064308 kubeadm.go:310] 
	I0916 10:30:46.099510 2064308 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 10:30:46.099519 2064308 kubeadm.go:310] 
	I0916 10:30:46.099624 2064308 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 10:30:46.099640 2064308 kubeadm.go:310] 
	I0916 10:30:46.099673 2064308 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 10:30:46.099733 2064308 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 10:30:46.099783 2064308 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 10:30:46.099787 2064308 kubeadm.go:310] 
	I0916 10:30:46.099841 2064308 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 10:30:46.099846 2064308 kubeadm.go:310] 
	I0916 10:30:46.099898 2064308 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 10:30:46.099903 2064308 kubeadm.go:310] 
	I0916 10:30:46.099959 2064308 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 10:30:46.100036 2064308 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 10:30:46.100108 2064308 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 10:30:46.100113 2064308 kubeadm.go:310] 
	I0916 10:30:46.100201 2064308 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 10:30:46.100280 2064308 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 10:30:46.100285 2064308 kubeadm.go:310] 
	I0916 10:30:46.100377 2064308 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token dx9pov.rexyyitopznv0w4v \
	I0916 10:30:46.100482 2064308 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 10:30:46.100503 2064308 kubeadm.go:310] 	--control-plane 
	I0916 10:30:46.100507 2064308 kubeadm.go:310] 
	I0916 10:30:46.100599 2064308 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 10:30:46.100604 2064308 kubeadm.go:310] 
	I0916 10:30:46.100684 2064308 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token dx9pov.rexyyitopznv0w4v \
	I0916 10:30:46.100792 2064308 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 10:30:46.104209 2064308 kubeadm.go:310] W0916 10:30:26.468492    1025 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:30:46.104508 2064308 kubeadm.go:310] W0916 10:30:26.469422    1025 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:30:46.104733 2064308 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 10:30:46.104841 2064308 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 10:30:46.104863 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:30:46.104872 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:30:46.107753 2064308 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:30:46.110419 2064308 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:30:46.114304 2064308 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:30:46.114327 2064308 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:30:46.132060 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 10:30:46.405649 2064308 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:30:46.405772 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:46.405844 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-451841 minikube.k8s.io/updated_at=2024_09_16T10_30_46_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=addons-451841 minikube.k8s.io/primary=true
	I0916 10:30:46.544610 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:46.544668 2064308 ops.go:34] apiserver oom_adj: -16
	I0916 10:30:47.045343 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:47.544713 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:48.045593 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:48.545262 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:49.044804 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:49.545373 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:50.045197 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:50.206616 2064308 kubeadm.go:1113] duration metric: took 3.800886781s to wait for elevateKubeSystemPrivileges
	I0916 10:30:50.206650 2064308 kubeadm.go:394] duration metric: took 23.912135022s to StartCluster
	I0916 10:30:50.206760 2064308 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:50.206888 2064308 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:30:50.207291 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:50.207495 2064308 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:30:50.207664 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 10:30:50.207912 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:50.207954 2064308 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
	I0916 10:30:50.208037 2064308 addons.go:69] Setting yakd=true in profile "addons-451841"
	I0916 10:30:50.208056 2064308 addons.go:234] Setting addon yakd=true in "addons-451841"
	I0916 10:30:50.208079 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.208590 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.209289 2064308 addons.go:69] Setting metrics-server=true in profile "addons-451841"
	I0916 10:30:50.209312 2064308 addons.go:234] Setting addon metrics-server=true in "addons-451841"
	I0916 10:30:50.209362 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.209903 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.210207 2064308 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-451841"
	I0916 10:30:50.210240 2064308 addons.go:234] Setting addon nvidia-device-plugin=true in "addons-451841"
	I0916 10:30:50.210263 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.210767 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.218758 2064308 addons.go:69] Setting registry=true in profile "addons-451841"
	I0916 10:30:50.218798 2064308 addons.go:234] Setting addon registry=true in "addons-451841"
	I0916 10:30:50.218832 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.219427 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.219602 2064308 addons.go:69] Setting cloud-spanner=true in profile "addons-451841"
	I0916 10:30:50.219647 2064308 addons.go:234] Setting addon cloud-spanner=true in "addons-451841"
	I0916 10:30:50.219685 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.221722 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.222266 2064308 addons.go:69] Setting storage-provisioner=true in profile "addons-451841"
	I0916 10:30:50.222288 2064308 addons.go:234] Setting addon storage-provisioner=true in "addons-451841"
	I0916 10:30:50.222314 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.222854 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.243982 2064308 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-451841"
	I0916 10:30:50.244056 2064308 addons.go:234] Setting addon csi-hostpath-driver=true in "addons-451841"
	I0916 10:30:50.244103 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.244878 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.250996 2064308 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-451841"
	I0916 10:30:50.251033 2064308 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-451841"
	I0916 10:30:50.251403 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.262479 2064308 addons.go:69] Setting volcano=true in profile "addons-451841"
	I0916 10:30:50.262526 2064308 addons.go:234] Setting addon volcano=true in "addons-451841"
	I0916 10:30:50.262567 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.263124 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.310432 2064308 addons.go:69] Setting default-storageclass=true in profile "addons-451841"
	I0916 10:30:50.310537 2064308 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-451841"
	I0916 10:30:50.311117 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.312245 2064308 addons.go:69] Setting volumesnapshots=true in profile "addons-451841"
	I0916 10:30:50.312377 2064308 addons.go:234] Setting addon volumesnapshots=true in "addons-451841"
	I0916 10:30:50.312448 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.313757 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.327865 2064308 addons.go:69] Setting gcp-auth=true in profile "addons-451841"
	I0916 10:30:50.327962 2064308 mustload.go:65] Loading cluster: addons-451841
	I0916 10:30:50.330380 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:50.330866 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.331146 2064308 out.go:177] * Verifying Kubernetes components...
	I0916 10:30:50.334941 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:50.349812 2064308 addons.go:69] Setting ingress=true in profile "addons-451841"
	I0916 10:30:50.349850 2064308 addons.go:234] Setting addon ingress=true in "addons-451841"
	I0916 10:30:50.349897 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.350438 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.350648 2064308 addons.go:234] Setting addon storage-provisioner-rancher=true in "addons-451841"
	I0916 10:30:50.350723 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.351151 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.369853 2064308 addons.go:69] Setting ingress-dns=true in profile "addons-451841"
	I0916 10:30:50.369893 2064308 addons.go:234] Setting addon ingress-dns=true in "addons-451841"
	I0916 10:30:50.369937 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.370407 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.375867 2064308 out.go:177]   - Using image docker.io/registry:2.8.3
	I0916 10:30:50.382808 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.6
	I0916 10:30:50.384082 2064308 addons.go:69] Setting inspektor-gadget=true in profile "addons-451841"
	I0916 10:30:50.384111 2064308 addons.go:234] Setting addon inspektor-gadget=true in "addons-451841"
	I0916 10:30:50.384143 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.384714 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.421644 2064308 out.go:177]   - Using image registry.k8s.io/metrics-server/metrics-server:v0.7.2
	I0916 10:30:50.424401 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0916 10:30:50.424443 2064308 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0916 10:30:50.424517 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.438309 2064308 out.go:177]   - Using image nvcr.io/nvidia/k8s-device-plugin:v0.16.2
	I0916 10:30:50.438567 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-rc.yaml
	I0916 10:30:50.438585 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (860 bytes)
	I0916 10:30:50.438646 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.439842 2064308 out.go:177]   - Using image docker.io/marcnuri/yakd:0.0.5
	I0916 10:30:50.440236 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-webhook-manager:v1.9.0
	I0916 10:30:50.440402 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.462370 2064308 addons.go:234] Setting addon default-storageclass=true in "addons-451841"
	I0916 10:30:50.462409 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.463191 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.463463 2064308 addons.go:431] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:30:50.466889 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
	I0916 10:30:50.467021 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.474834 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-ns.yaml
	I0916 10:30:50.474857 2064308 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
	I0916 10:30:50.474919 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.484574 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:30:50.485713 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
	I0916 10:30:50.508488 2064308 out.go:177]   - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.23
	I0916 10:30:50.525937 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-controller-manager:v1.9.0
	I0916 10:30:50.526169 2064308 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:30:50.526183 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:30:50.526247 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.542222 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
	I0916 10:30:50.543269 2064308 addons.go:431] installing /etc/kubernetes/addons/deployment.yaml
	I0916 10:30:50.543418 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
	I0916 10:30:50.543483 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.563839 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-scheduler:v1.9.0
	I0916 10:30:50.567954 2064308 addons.go:431] installing /etc/kubernetes/addons/volcano-deployment.yaml
	I0916 10:30:50.567983 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volcano-deployment.yaml (434001 bytes)
	I0916 10:30:50.568053 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.583888 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.587279 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/minikube-ingress-dns:0.0.3
	I0916 10:30:50.587486 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/controller:v1.11.2
	I0916 10:30:50.589757 2064308 out.go:177]   - Using image docker.io/busybox:stable
	I0916 10:30:50.589894 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
	I0916 10:30:50.592333 2064308 addons.go:431] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:30:50.592357 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2442 bytes)
	I0916 10:30:50.592588 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.594469 2064308 out.go:177]   - Using image docker.io/rancher/local-path-provisioner:v0.0.22
	I0916 10:30:50.594639 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:30:50.596571 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
	I0916 10:30:50.596784 2064308 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:30:50.596798 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
	I0916 10:30:50.596863 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.628847 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:30:50.631659 2064308 addons.go:431] installing /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:30:50.631684 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
	I0916 10:30:50.631748 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.645470 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.650239 2064308 out.go:177]   - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0
	I0916 10:30:50.650364 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
	I0916 10:30:50.650401 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
	I0916 10:30:50.652227 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.653039 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-namespace.yaml
	I0916 10:30:50.654718 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-namespace.yaml --> /etc/kubernetes/addons/ig-namespace.yaml (55 bytes)
	I0916 10:30:50.654790 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.661463 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
	I0916 10:30:50.661708 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.654527 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
	I0916 10:30:50.662209 2064308 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
	I0916 10:30:50.662349 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.674173 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
	I0916 10:30:50.676994 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
	I0916 10:30:50.680275 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
	I0916 10:30:50.680305 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
	I0916 10:30:50.680378 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.691037 2064308 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:30:50.691057 2064308 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:30:50.691123 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.774282 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.780046 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.807312 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.827826 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.831006 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.853363 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.867169 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.875051 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.875081 2064308 retry.go:31] will retry after 209.079202ms: ssh: handshake failed: EOF
	I0916 10:30:50.875514 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.878034 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.878076 2064308 retry.go:31] will retry after 358.329045ms: ssh: handshake failed: EOF
	I0916 10:30:50.878970 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.891671 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.913115 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.913144 2064308 retry.go:31] will retry after 291.220359ms: ssh: handshake failed: EOF
	W0916 10:30:51.085514 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:51.085558 2064308 retry.go:31] will retry after 406.090408ms: ssh: handshake failed: EOF
	I0916 10:30:51.380959 2064308 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml": (1.173254923s)
	I0916 10:30:51.381043 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:30:51.381158 2064308 ssh_runner.go:235] Completed: sudo systemctl daemon-reload: (1.046146925s)
	I0916 10:30:51.381191 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 10:30:51.381193 2064308 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:30:51.393457 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-serviceaccount.yaml
	I0916 10:30:51.393478 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-serviceaccount.yaml --> /etc/kubernetes/addons/ig-serviceaccount.yaml (80 bytes)
	I0916 10:30:51.405074 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:30:51.536141 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:30:51.553523 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-hostpath.yaml
	I0916 10:30:51.553553 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
	I0916 10:30:51.664299 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-role.yaml
	I0916 10:30:51.664331 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-role.yaml --> /etc/kubernetes/addons/ig-role.yaml (210 bytes)
	I0916 10:30:51.694553 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-sa.yaml
	I0916 10:30:51.694580 2064308 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
	I0916 10:30:51.695380 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:30:51.703369 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-svc.yaml
	I0916 10:30:51.703394 2064308 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
	I0916 10:30:51.711436 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0916 10:30:51.711460 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
	I0916 10:30:51.716209 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml
	I0916 10:30:51.833745 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
	I0916 10:30:51.872114 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0916 10:30:51.872156 2064308 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0916 10:30:51.879573 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
	I0916 10:30:51.879603 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
	I0916 10:30:51.894115 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:30:51.927534 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-rolebinding.yaml
	I0916 10:30:51.927573 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-rolebinding.yaml --> /etc/kubernetes/addons/ig-rolebinding.yaml (244 bytes)
	I0916 10:30:51.967967 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-crb.yaml
	I0916 10:30:51.967997 2064308 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
	I0916 10:30:51.987647 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:30:51.987672 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
	I0916 10:30:52.018799 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
	I0916 10:30:52.018835 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
	I0916 10:30:52.040829 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:30:52.040863 2064308 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	I0916 10:30:52.062309 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
	I0916 10:30:52.062358 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
	I0916 10:30:52.141020 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrole.yaml
	I0916 10:30:52.141055 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrole.yaml --> /etc/kubernetes/addons/ig-clusterrole.yaml (1485 bytes)
	I0916 10:30:52.150339 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:30:52.156143 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-svc.yaml
	I0916 10:30:52.156182 2064308 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
	I0916 10:30:52.267727 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
	I0916 10:30:52.267754 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
	I0916 10:30:52.278358 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
	I0916 10:30:52.278410 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
	I0916 10:30:52.295260 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:30:52.315396 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:30:52.396988 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrolebinding.yaml
	I0916 10:30:52.397029 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrolebinding.yaml --> /etc/kubernetes/addons/ig-clusterrolebinding.yaml (274 bytes)
	I0916 10:30:52.414014 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:30:52.414040 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
	I0916 10:30:52.514419 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
	I0916 10:30:52.514447 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
	I0916 10:30:52.534199 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
	I0916 10:30:52.534239 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
	I0916 10:30:52.671597 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (1.290515457s)
	I0916 10:30:52.715081 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-crd.yaml
	I0916 10:30:52.715111 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-crd.yaml --> /etc/kubernetes/addons/ig-crd.yaml (5216 bytes)
	I0916 10:30:52.719191 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:30:52.832284 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
	I0916 10:30:52.832313 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
	I0916 10:30:52.924488 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
	I0916 10:30:52.924521 2064308 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
	I0916 10:30:53.168769 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:30:53.168802 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-daemonset.yaml (7735 bytes)
	I0916 10:30:53.177620 2064308 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (1.796357981s)
	I0916 10:30:53.177657 2064308 start.go:971] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
	I0916 10:30:53.177732 2064308 ssh_runner.go:235] Completed: sudo systemctl start kubelet: (1.79643144s)
	I0916 10:30:53.179507 2064308 node_ready.go:35] waiting up to 6m0s for node "addons-451841" to be "Ready" ...
	I0916 10:30:53.184404 2064308 node_ready.go:49] node "addons-451841" has status "Ready":"True"
	I0916 10:30:53.184443 2064308 node_ready.go:38] duration metric: took 4.710029ms for node "addons-451841" to be "Ready" ...
	I0916 10:30:53.184458 2064308 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:30:53.197525 2064308 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace to be "Ready" ...
	I0916 10:30:53.269899 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
	I0916 10:30:53.269941 2064308 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
	I0916 10:30:53.282557 2064308 addons.go:431] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:30:53.282590 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
	I0916 10:30:53.466493 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
	I0916 10:30:53.466519 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
	I0916 10:30:53.471643 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:30:53.602578 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:30:53.683185 2064308 kapi.go:214] "coredns" deployment in "kube-system" namespace and "addons-451841" context rescaled to 1 replicas
	I0916 10:30:53.701295 2064308 pod_ready.go:98] error getting pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bvmss" not found
	I0916 10:30:53.701323 2064308 pod_ready.go:82] duration metric: took 503.765362ms for pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace to be "Ready" ...
	E0916 10:30:53.701335 2064308 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bvmss" not found
	I0916 10:30:53.701342 2064308 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace to be "Ready" ...
	I0916 10:30:53.722187 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
	I0916 10:30:53.722214 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
	I0916 10:30:54.162813 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:30:54.162856 2064308 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
	I0916 10:30:54.507449 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:30:55.304651 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (3.899501244s)
	I0916 10:30:55.634996 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (3.939582165s)
	I0916 10:30:55.635110 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (4.098941534s)
	I0916 10:30:55.711983 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:30:57.666996 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
	I0916 10:30:57.667089 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:57.696419 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:57.712916 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:30:58.304674 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
	I0916 10:30:58.423037 2064308 addons.go:234] Setting addon gcp-auth=true in "addons-451841"
	I0916 10:30:58.423145 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:58.423647 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:58.453963 2064308 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
	I0916 10:30:58.454022 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:58.488418 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:59.724111 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:01.085964 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml: (9.369716418s)
	I0916 10:31:01.086088 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (9.252309206s)
	I0916 10:31:01.086143 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (9.191998071s)
	I0916 10:31:01.086179 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (8.93580956s)
	I0916 10:31:01.086966 2064308 addons.go:475] Verifying addon registry=true in "addons-451841"
	I0916 10:31:01.086280 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (8.790993869s)
	I0916 10:31:01.087161 2064308 addons.go:475] Verifying addon ingress=true in "addons-451841"
	I0916 10:31:01.086364 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (8.367136002s)
	I0916 10:31:01.086423 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml: (7.614752608s)
	I0916 10:31:01.086494 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (7.483872887s)
	I0916 10:31:01.086607 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (8.771185269s)
	I0916 10:31:01.087690 2064308 addons.go:475] Verifying addon metrics-server=true in "addons-451841"
	W0916 10:31:01.087784 2064308 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:31:01.087807 2064308 retry.go:31] will retry after 241.995667ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:31:01.089709 2064308 out.go:177] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
	
		minikube -p addons-451841 service yakd-dashboard -n yakd-dashboard
	
	I0916 10:31:01.089714 2064308 out.go:177] * Verifying ingress addon...
	I0916 10:31:01.089782 2064308 out.go:177] * Verifying registry addon...
	I0916 10:31:01.092615 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
	I0916 10:31:01.093670 2064308 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
	I0916 10:31:01.146629 2064308 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
	I0916 10:31:01.146661 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:01.147988 2064308 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
	I0916 10:31:01.148013 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:01.330778 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:31:01.607432 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:01.608116 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:01.783267 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (7.275754054s)
	I0916 10:31:01.783417 2064308 addons.go:475] Verifying addon csi-hostpath-driver=true in "addons-451841"
	I0916 10:31:01.783367 2064308 ssh_runner.go:235] Completed: cat /var/lib/minikube/google_application_credentials.json: (3.329378043s)
	I0916 10:31:01.785766 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:31:01.785796 2064308 out.go:177] * Verifying csi-hostpath-driver addon...
	I0916 10:31:01.788664 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.2
	I0916 10:31:01.789894 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
	I0916 10:31:01.794958 2064308 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0916 10:31:01.795006 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:01.797295 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
	I0916 10:31:01.797332 2064308 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
	I0916 10:31:01.893997 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-service.yaml
	I0916 10:31:01.894071 2064308 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
	I0916 10:31:01.937742 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:31:01.937810 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
	I0916 10:31:01.987240 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:31:02.097286 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:02.100875 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:02.209340 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:02.305635 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:02.597370 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:02.599723 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:02.795942 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:03.100397 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:03.103196 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:03.312002 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:03.374850 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml: (1.387566006s)
	I0916 10:31:03.375988 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (2.045090366s)
	I0916 10:31:03.378127 2064308 addons.go:475] Verifying addon gcp-auth=true in "addons-451841"
	I0916 10:31:03.381777 2064308 out.go:177] * Verifying gcp-auth addon...
	I0916 10:31:03.384298 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
	I0916 10:31:03.406084 2064308 kapi.go:86] Found 0 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:31:03.599867 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:03.600481 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:03.720241 2064308 pod_ready.go:93] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.720276 2064308 pod_ready.go:82] duration metric: took 10.018926311s for pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.720289 2064308 pod_ready.go:79] waiting up to 6m0s for pod "etcd-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.746042 2064308 pod_ready.go:93] pod "etcd-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.746067 2064308 pod_ready.go:82] duration metric: took 25.771231ms for pod "etcd-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.746081 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.752533 2064308 pod_ready.go:93] pod "kube-apiserver-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.752559 2064308 pod_ready.go:82] duration metric: took 6.470582ms for pod "kube-apiserver-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.752571 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.758462 2064308 pod_ready.go:93] pod "kube-controller-manager-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.758495 2064308 pod_ready.go:82] duration metric: took 5.916018ms for pod "kube-controller-manager-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.758507 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tltkn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.765336 2064308 pod_ready.go:93] pod "kube-proxy-tltkn" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.765369 2064308 pod_ready.go:82] duration metric: took 6.854119ms for pod "kube-proxy-tltkn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.765382 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.795811 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:04.099344 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:04.100673 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:04.104903 2064308 pod_ready.go:93] pod "kube-scheduler-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:04.104972 2064308 pod_ready.go:82] duration metric: took 339.581954ms for pod "kube-scheduler-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:04.104999 2064308 pod_ready.go:79] waiting up to 6m0s for pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:04.295860 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:04.598910 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:04.602815 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:04.795954 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:05.100224 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:05.101534 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:05.296166 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:05.599439 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:05.601426 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:05.795442 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:06.102393 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:06.103036 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:06.122130 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:06.299045 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:06.599932 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:06.601206 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:06.814263 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:07.096848 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:07.101223 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:07.295217 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:07.599444 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:07.600431 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:07.795082 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:08.101892 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:08.102976 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:08.296014 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:08.598395 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:08.598643 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:08.613020 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:08.795739 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:09.103941 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:09.104967 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:09.295694 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:09.599659 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:09.601180 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:09.796354 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:10.098446 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:10.099577 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:10.295198 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:10.597281 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:10.599286 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:10.616287 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:10.795720 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:11.097801 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:11.099342 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:11.297048 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:11.599247 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:11.599974 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:11.794513 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:12.097432 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:12.099058 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:12.295097 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:12.598578 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:12.599897 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:12.796822 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:13.096898 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:13.098940 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:13.112547 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:13.295802 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:13.599642 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:13.600761 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:13.794583 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:14.096452 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:14.098429 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:14.297517 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:14.598010 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:14.599983 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:14.795140 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:15.104125 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:15.104975 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:15.113778 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:15.295679 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:15.598018 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:15.598555 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:15.795791 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:16.096811 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:16.099236 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:16.296057 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:16.597945 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:16.599646 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:16.797262 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:17.098985 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:17.099689 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:17.295469 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:17.599269 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:17.600683 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:17.611951 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:17.794427 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:18.099862 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:18.101710 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:18.296191 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:18.596772 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:18.600049 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:18.811403 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:19.098130 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:19.099143 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:19.111509 2064308 pod_ready.go:93] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:19.111570 2064308 pod_ready.go:82] duration metric: took 15.006549742s for pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:19.111587 2064308 pod_ready.go:39] duration metric: took 25.927112572s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:31:19.111604 2064308 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:31:19.111670 2064308 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:31:19.162518 2064308 api_server.go:72] duration metric: took 28.954985289s to wait for apiserver process to appear ...
	I0916 10:31:19.162546 2064308 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:31:19.162572 2064308 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:31:19.179642 2064308 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:31:19.180628 2064308 api_server.go:141] control plane version: v1.31.1
	I0916 10:31:19.180658 2064308 api_server.go:131] duration metric: took 18.103285ms to wait for apiserver health ...
	I0916 10:31:19.180668 2064308 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:31:19.201200 2064308 system_pods.go:59] 18 kube-system pods found
	I0916 10:31:19.201280 2064308 system_pods.go:61] "coredns-7c65d6cfc9-jqthn" [bc44b698-a863-4ab8-85d8-5bd54d141bd3] Running
	I0916 10:31:19.201299 2064308 system_pods.go:61] "csi-hostpath-attacher-0" [b2e317f7-5d09-4cd6-823d-cc849d3bf9e2] Running
	I0916 10:31:19.201316 2064308 system_pods.go:61] "csi-hostpath-resizer-0" [1d31c631-4247-4ffe-8a87-9b11803bc389] Running
	I0916 10:31:19.201350 2064308 system_pods.go:61] "csi-hostpathplugin-r28vj" [ad580f65-4a4a-445c-bec7-b518245397ea] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0916 10:31:19.201372 2064308 system_pods.go:61] "etcd-addons-451841" [1a9d168e-83c3-4fc0-b188-1da688352b51] Running
	I0916 10:31:19.201392 2064308 system_pods.go:61] "kindnet-zckxr" [b831bf61-6a35-4681-afe4-f5a0f875a7b5] Running
	I0916 10:31:19.201409 2064308 system_pods.go:61] "kube-apiserver-addons-451841" [db38aff8-23ff-466f-a1b0-ab5f2041183c] Running
	I0916 10:31:19.201425 2064308 system_pods.go:61] "kube-controller-manager-addons-451841" [53d22d78-137d-4306-b8df-d9a55061c9df] Running
	I0916 10:31:19.201458 2064308 system_pods.go:61] "kube-ingress-dns-minikube" [c77fac29-39b0-4e39-94f6-a72ac395b130] Running
	I0916 10:31:19.201483 2064308 system_pods.go:61] "kube-proxy-tltkn" [073e7c9c-1ec1-45db-9603-68862d546266] Running
	I0916 10:31:19.201501 2064308 system_pods.go:61] "kube-scheduler-addons-451841" [5b953c55-7b45-4221-89bf-177d1a4efd05] Running
	I0916 10:31:19.201520 2064308 system_pods.go:61] "metrics-server-84c5f94fbc-q47pm" [2252baaa-acbc-43dd-b38e-e8cd59ac7825] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0916 10:31:19.201537 2064308 system_pods.go:61] "nvidia-device-plugin-daemonset-l6r5c" [9645617a-ab29-4c4e-a4ec-4ea217ffb5fb] Running
	I0916 10:31:19.201566 2064308 system_pods.go:61] "registry-66c9cd494c-l957b" [17af08bf-9965-4bec-8d1b-0c4c37167ac1] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
	I0916 10:31:19.201591 2064308 system_pods.go:61] "registry-proxy-9cpxl" [10361d7a-9abb-41e6-88eb-2194d01e1301] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0916 10:31:19.201613 2064308 system_pods.go:61] "snapshot-controller-56fcc65765-6llf9" [7febb7b8-15ce-499d-83f7-b82e03e35902] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.201634 2064308 system_pods.go:61] "snapshot-controller-56fcc65765-qxvll" [0a01a9d7-31e0-4965-baba-078f44b17fac] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.201663 2064308 system_pods.go:61] "storage-provisioner" [6a86a07f-e25d-4ac9-9c07-dc9ae0048083] Running
	I0916 10:31:19.201686 2064308 system_pods.go:74] duration metric: took 21.010389ms to wait for pod list to return data ...
	I0916 10:31:19.201707 2064308 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:31:19.204845 2064308 default_sa.go:45] found service account: "default"
	I0916 10:31:19.204868 2064308 default_sa.go:55] duration metric: took 3.144001ms for default service account to be created ...
	I0916 10:31:19.204877 2064308 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:31:19.219489 2064308 system_pods.go:86] 18 kube-system pods found
	I0916 10:31:19.219563 2064308 system_pods.go:89] "coredns-7c65d6cfc9-jqthn" [bc44b698-a863-4ab8-85d8-5bd54d141bd3] Running
	I0916 10:31:19.219586 2064308 system_pods.go:89] "csi-hostpath-attacher-0" [b2e317f7-5d09-4cd6-823d-cc849d3bf9e2] Running
	I0916 10:31:19.219605 2064308 system_pods.go:89] "csi-hostpath-resizer-0" [1d31c631-4247-4ffe-8a87-9b11803bc389] Running
	I0916 10:31:19.219640 2064308 system_pods.go:89] "csi-hostpathplugin-r28vj" [ad580f65-4a4a-445c-bec7-b518245397ea] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0916 10:31:19.219669 2064308 system_pods.go:89] "etcd-addons-451841" [1a9d168e-83c3-4fc0-b188-1da688352b51] Running
	I0916 10:31:19.219690 2064308 system_pods.go:89] "kindnet-zckxr" [b831bf61-6a35-4681-afe4-f5a0f875a7b5] Running
	I0916 10:31:19.219710 2064308 system_pods.go:89] "kube-apiserver-addons-451841" [db38aff8-23ff-466f-a1b0-ab5f2041183c] Running
	I0916 10:31:19.219728 2064308 system_pods.go:89] "kube-controller-manager-addons-451841" [53d22d78-137d-4306-b8df-d9a55061c9df] Running
	I0916 10:31:19.219766 2064308 system_pods.go:89] "kube-ingress-dns-minikube" [c77fac29-39b0-4e39-94f6-a72ac395b130] Running
	I0916 10:31:19.219784 2064308 system_pods.go:89] "kube-proxy-tltkn" [073e7c9c-1ec1-45db-9603-68862d546266] Running
	I0916 10:31:19.219799 2064308 system_pods.go:89] "kube-scheduler-addons-451841" [5b953c55-7b45-4221-89bf-177d1a4efd05] Running
	I0916 10:31:19.219819 2064308 system_pods.go:89] "metrics-server-84c5f94fbc-q47pm" [2252baaa-acbc-43dd-b38e-e8cd59ac7825] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0916 10:31:19.219847 2064308 system_pods.go:89] "nvidia-device-plugin-daemonset-l6r5c" [9645617a-ab29-4c4e-a4ec-4ea217ffb5fb] Running
	I0916 10:31:19.219875 2064308 system_pods.go:89] "registry-66c9cd494c-l957b" [17af08bf-9965-4bec-8d1b-0c4c37167ac1] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
	I0916 10:31:19.219896 2064308 system_pods.go:89] "registry-proxy-9cpxl" [10361d7a-9abb-41e6-88eb-2194d01e1301] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0916 10:31:19.219915 2064308 system_pods.go:89] "snapshot-controller-56fcc65765-6llf9" [7febb7b8-15ce-499d-83f7-b82e03e35902] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.219935 2064308 system_pods.go:89] "snapshot-controller-56fcc65765-qxvll" [0a01a9d7-31e0-4965-baba-078f44b17fac] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.219968 2064308 system_pods.go:89] "storage-provisioner" [6a86a07f-e25d-4ac9-9c07-dc9ae0048083] Running
	I0916 10:31:19.219989 2064308 system_pods.go:126] duration metric: took 15.104177ms to wait for k8s-apps to be running ...
	I0916 10:31:19.220008 2064308 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:31:19.220090 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:31:19.263162 2064308 system_svc.go:56] duration metric: took 43.144676ms WaitForService to wait for kubelet
	I0916 10:31:19.263243 2064308 kubeadm.go:582] duration metric: took 29.055714708s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:31:19.263279 2064308 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:31:19.272478 2064308 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:31:19.272561 2064308 node_conditions.go:123] node cpu capacity is 2
	I0916 10:31:19.272591 2064308 node_conditions.go:105] duration metric: took 9.29091ms to run NodePressure ...
	I0916 10:31:19.272616 2064308 start.go:241] waiting for startup goroutines ...
	I0916 10:31:19.305039 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:19.605207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:19.605801 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:19.797193 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:20.099691 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:20.101048 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:20.295291 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:20.597682 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:20.598569 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:20.797887 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:21.096766 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:21.099258 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:21.294755 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:21.597973 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:21.600238 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:21.803444 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:22.097870 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:22.100851 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:22.295006 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:22.597700 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:22.598742 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:22.795839 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:23.096175 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:23.098155 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:23.294814 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:23.596166 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:23.598634 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:23.795172 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:24.096643 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:24.099715 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:24.297255 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:24.598721 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:24.599933 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:24.795260 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:25.098369 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:25.101032 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:25.295093 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:25.597734 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:25.597966 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:25.795323 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:26.096041 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:26.099677 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:26.295063 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:26.597593 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:26.599159 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:26.795825 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:27.098811 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:27.099453 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:27.295012 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:27.597182 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:27.601645 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:27.795056 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:28.128064 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:28.129640 2064308 kapi.go:107] duration metric: took 27.037023988s to wait for kubernetes.io/minikube-addons=registry ...
	I0916 10:31:28.325425 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:28.598623 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:28.795615 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:29.104511 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:29.295646 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:29.598962 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:29.795067 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:30.099851 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:30.296647 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:30.598332 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:30.796058 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:31.099992 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:31.294874 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:31.598117 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:31.796531 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:32.098393 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:32.295287 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:32.598055 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:32.795217 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:33.099311 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:33.295339 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:33.598188 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:33.795029 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:34.098345 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:34.295712 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:34.598442 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:34.795386 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:35.098874 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:35.295415 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:35.598136 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:35.795586 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:36.098658 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:36.294379 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:36.598764 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:36.795529 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:37.098523 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:37.296711 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:37.601252 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:37.799472 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:38.100971 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:38.298686 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:38.599535 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:38.795481 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:39.098734 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:39.296827 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:39.611876 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:39.851830 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:40.108718 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:40.295843 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:40.599050 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:40.795575 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:41.098568 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:41.295957 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:41.598039 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:41.796038 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:42.099484 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:42.295707 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:42.598887 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:42.795416 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:43.099107 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:43.295766 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:43.599999 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:43.795242 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:44.098395 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:44.295957 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:44.600054 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:44.794470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:45.100863 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:45.295685 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:45.599065 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:45.798514 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:46.099116 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:46.296057 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:46.599389 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:46.796585 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:47.099083 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:47.296145 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:47.598490 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:47.797079 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:48.100448 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:48.295294 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:48.598227 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:48.794662 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:49.119185 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:49.295351 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:49.598797 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:49.794962 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:50.098374 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:50.295501 2064308 kapi.go:107] duration metric: took 48.505612662s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
	I0916 10:31:50.598550 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:51.098277 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:51.598976 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:52.098206 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:52.597960 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:53.098585 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:53.598884 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:54.098582 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:54.598852 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:55.098478 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:55.598212 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:56.098517 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:56.598412 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:57.098499 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:57.598710 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:58.097637 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:58.599134 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:59.098778 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:59.598318 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:00.130067 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:00.598955 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:01.098901 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:01.598465 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:02.098925 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:02.598148 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:03.102570 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:03.598295 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:04.099028 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:04.598994 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:05.100186 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:05.598454 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:06.098931 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:06.598336 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:07.098800 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:07.599302 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:08.098401 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:08.599190 2064308 kapi.go:107] duration metric: took 1m7.505513413s to wait for app.kubernetes.io/name=ingress-nginx ...
	I0916 10:32:25.388811 2064308 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:32:25.388836 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:25.888825 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:26.388022 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:26.887847 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:27.387834 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:27.888795 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:28.387767 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:28.887542 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:29.388486 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:29.888784 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:30.387676 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:30.888490 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:31.388236 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:31.888242 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:32.387732 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:32.888206 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:33.387868 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:33.887962 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:34.387683 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:34.889279 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:35.388145 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:35.887555 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:36.389045 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:36.887848 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:37.388742 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:37.888016 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:38.388211 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:38.887716 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:39.388708 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:39.888575 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:40.388841 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:40.888385 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:41.388668 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:41.887792 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:42.388021 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:42.888125 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:43.388320 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:43.887796 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:44.388101 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:44.888791 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:45.391207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:45.888190 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:46.387869 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:46.887554 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:47.388470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:47.888177 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:48.387903 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:48.888232 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:49.388449 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:49.888527 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:50.388650 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:50.888495 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:51.388590 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:51.888197 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:52.387563 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:52.888238 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:53.388322 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:53.887557 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:54.388664 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:54.888836 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:55.388171 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:55.888180 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:56.388322 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:56.888567 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:57.388117 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:57.887422 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:58.388230 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:58.887872 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:59.396878 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:59.888550 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:00.394252 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:00.887612 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:01.392523 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:01.888091 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:02.393207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:02.887610 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:03.388745 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:03.888344 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:04.388999 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:04.889012 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:05.390448 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:05.888198 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:06.395413 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:06.889275 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:07.387879 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:07.888183 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:08.388311 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:08.888612 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:09.388334 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:09.887931 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:10.387765 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:10.888317 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:11.388557 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:11.887439 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:12.388213 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:12.887810 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:13.388135 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:13.888239 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:14.388445 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:14.889102 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:15.388533 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:15.887383 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:16.388426 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:16.888022 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:17.388399 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:17.887327 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:18.388016 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:18.887470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:19.388533 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:19.889124 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:20.387631 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:20.888484 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:21.388124 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:21.887946 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:22.388268 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:22.887332 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:23.388224 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:23.887844 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:24.387744 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:24.888405 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:25.388231 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:25.888672 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:26.388063 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:26.888126 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:27.387865 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:27.887552 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:28.387806 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:28.887772 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:29.388587 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:29.888551 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:30.387903 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:30.887507 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:31.388609 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:31.888449 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:32.388259 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:32.887834 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:33.388141 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:33.888934 2064308 kapi.go:107] duration metric: took 2m30.504634261s to wait for kubernetes.io/minikube-addons=gcp-auth ...
	I0916 10:33:33.890859 2064308 out.go:177] * Your GCP credentials will now be mounted into every pod created in the addons-451841 cluster.
	I0916 10:33:33.892432 2064308 out.go:177] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
	I0916 10:33:33.893920 2064308 out.go:177] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
	I0916 10:33:33.895584 2064308 out.go:177] * Enabled addons: nvidia-device-plugin, storage-provisioner, ingress-dns, storage-provisioner-rancher, volcano, cloud-spanner, metrics-server, inspektor-gadget, yakd, default-storageclass, volumesnapshots, registry, csi-hostpath-driver, ingress, gcp-auth
	I0916 10:33:33.897279 2064308 addons.go:510] duration metric: took 2m43.689318504s for enable addons: enabled=[nvidia-device-plugin storage-provisioner ingress-dns storage-provisioner-rancher volcano cloud-spanner metrics-server inspektor-gadget yakd default-storageclass volumesnapshots registry csi-hostpath-driver ingress gcp-auth]
	I0916 10:33:33.897342 2064308 start.go:246] waiting for cluster config update ...
	I0916 10:33:33.897367 2064308 start.go:255] writing updated cluster config ...
	I0916 10:33:33.898186 2064308 ssh_runner.go:195] Run: rm -f paused
	I0916 10:33:33.906793 2064308 out.go:177] * Done! kubectl is now configured to use "addons-451841" cluster and "default" namespace by default
	E0916 10:33:33.908425 2064308 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED              STATE               NAME                                     ATTEMPT             POD ID              POD
	4bf6f7c832fdd       4f725bf50aaa5       About a minute ago   Exited              gadget                                   6                   11fc6773b3e1a       gadget-wjwc2
	f90c8869604c5       6ef582f3ec844       5 minutes ago        Running             gcp-auth                                 0                   9038b6b53facd       gcp-auth-89d5ffd79-pw58v
	6f68aecec6aa2       8b46b1cd48760       6 minutes ago        Running             admission                                0                   e3af2951f3794       volcano-admission-77d7d48b68-sjxcs
	dd63136d8d6ac       289a818c8d9c5       6 minutes ago        Running             controller                               0                   b4699f942aa64       ingress-nginx-controller-bc57996ff-rqhcp
	a490639f0e8aa       ee6d597e62dc8       6 minutes ago        Running             csi-snapshotter                          0                   8260f57befdda       csi-hostpathplugin-r28vj
	317ce7462f733       642ded511e141       6 minutes ago        Running             csi-provisioner                          0                   8260f57befdda       csi-hostpathplugin-r28vj
	324d1501e94dc       420193b27261a       6 minutes ago        Exited              patch                                    2                   13d0568de4b9d       ingress-nginx-admission-patch-z2qc7
	7b9cc7b5195ab       922312104da8a       6 minutes ago        Running             liveness-probe                           0                   8260f57befdda       csi-hostpathplugin-r28vj
	bee97d004dc4d       08f6b2990811a       7 minutes ago        Running             hostpath                                 0                   8260f57befdda       csi-hostpathplugin-r28vj
	19ec6d5fbc0fd       8b46b1cd48760       7 minutes ago        Exited              main                                     0                   009b63594d8b2       volcano-admission-init-bz266
	000463bf50714       d9c7ad4c226bf       7 minutes ago        Running             volcano-scheduler                        0                   d9214c0e709d4       volcano-scheduler-576bc46687-xwjbn
	4dc42ec686d73       1505f556b3a7b       7 minutes ago        Running             volcano-controllers                      0                   343744c6dcf07       volcano-controllers-56675bb4d5-2ltwp
	30f0f6b13e6d5       420193b27261a       7 minutes ago        Exited              create                                   0                   993d0544f3868       ingress-nginx-admission-create-4vr4g
	32df8554c702e       4d1e5c3e97420       7 minutes ago        Running             volume-snapshot-controller               0                   864cff1eb40c5       snapshot-controller-56fcc65765-6llf9
	f28ea158892d3       5548a49bb60ba       7 minutes ago        Running             metrics-server                           0                   f49e908ac9969       metrics-server-84c5f94fbc-q47pm
	7558a63005c7b       0107d56dbc0be       7 minutes ago        Running             node-driver-registrar                    0                   8260f57befdda       csi-hostpathplugin-r28vj
	d181a00ffae8d       4d1e5c3e97420       7 minutes ago        Running             volume-snapshot-controller               0                   5f5c44341cf11       snapshot-controller-56fcc65765-qxvll
	6b271689ecd4e       7ce2150c8929b       7 minutes ago        Running             local-path-provisioner                   0                   de8add92893e8       local-path-provisioner-86d989889c-qkpm6
	bd1a124f1e30d       8be4bcf8ec607       7 minutes ago        Running             cloud-spanner-emulator                   0                   14a43ef4433bb       cloud-spanner-emulator-769b77f747-m5wld
	3f681021aea3a       a9bac31a5be8d       7 minutes ago        Running             nvidia-device-plugin-ctr                 0                   2ef7d8ec9c04f       nvidia-device-plugin-daemonset-l6r5c
	98b48c685a09e       487fa743e1e22       7 minutes ago        Running             csi-resizer                              0                   89cb8ade3231b       csi-hostpath-resizer-0
	2472144c5bc6d       1461903ec4fe9       7 minutes ago        Running             csi-external-health-monitor-controller   0                   8260f57befdda       csi-hostpathplugin-r28vj
	0af6491cd95ee       9a80d518f102c       7 minutes ago        Running             csi-attacher                             0                   0d39436266817       csi-hostpath-attacher-0
	9b811a5c5e80c       35508c2f890c4       7 minutes ago        Running             minikube-ingress-dns                     0                   e1ed027bac8d8       kube-ingress-dns-minikube
	5232ad6b096cb       2f6c962e7b831       7 minutes ago        Running             coredns                                  0                   3ad39eb105298       coredns-7c65d6cfc9-jqthn
	4ddb5fa614111       ba04bb24b9575       7 minutes ago        Running             storage-provisioner                      0                   d0cffc65c18c1       storage-provisioner
	64b671b165f6f       6a23fa8fd2b78       7 minutes ago        Running             kindnet-cni                              0                   bd9ef3e1818e4       kindnet-zckxr
	35987f39fe9ef       24a140c548c07       7 minutes ago        Running             kube-proxy                               0                   6a8ebbdde94be       kube-proxy-tltkn
	8769c148a0bb3       27e3830e14027       8 minutes ago        Running             etcd                                     0                   290d52892953c       etcd-addons-451841
	31da3c8e5867c       279f381cb3736       8 minutes ago        Running             kube-controller-manager                  0                   349d5195292e8       kube-controller-manager-addons-451841
	808425f96a229       7f8aa378bb47d       8 minutes ago        Running             kube-scheduler                           0                   50415da17c7f0       kube-scheduler-addons-451841
	2870b9699fd97       d3f53a98c0a9d       8 minutes ago        Running             kube-apiserver                           0                   1d8868dd2cf0d       kube-apiserver-addons-451841
	
	
	==> containerd <==
	Sep 16 10:38:43 addons-451841 containerd[816]: time="2024-09-16T10:38:43.888290545Z" level=info msg="StopContainer for \"d221c581079ff3e0a9a4784669990de31c5f47ed658aad778df8fe0358ae46df\" returns successfully"
	Sep 16 10:38:43 addons-451841 containerd[816]: time="2024-09-16T10:38:43.899817443Z" level=info msg="StopPodSandbox for \"82bcc82f48993503ebd93392cb512719616d3407ccd539baa6173c4db7a544f7\""
	Sep 16 10:38:43 addons-451841 containerd[816]: time="2024-09-16T10:38:43.900394243Z" level=info msg="Container to stop \"d221c581079ff3e0a9a4784669990de31c5f47ed658aad778df8fe0358ae46df\" must be in running or unknown state, current state \"CONTAINER_EXITED\""
	Sep 16 10:38:43 addons-451841 containerd[816]: time="2024-09-16T10:38:43.922156896Z" level=info msg="shim disconnected" id=45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99 namespace=k8s.io
	Sep 16 10:38:43 addons-451841 containerd[816]: time="2024-09-16T10:38:43.922237347Z" level=warning msg="cleaning up after shim disconnected" id=45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99 namespace=k8s.io
	Sep 16 10:38:43 addons-451841 containerd[816]: time="2024-09-16T10:38:43.922248645Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:38:43 addons-451841 containerd[816]: time="2024-09-16T10:38:43.970862401Z" level=info msg="StopContainer for \"45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99\" returns successfully"
	Sep 16 10:38:43 addons-451841 containerd[816]: time="2024-09-16T10:38:43.971800258Z" level=info msg="StopPodSandbox for \"7b57eb1530381cfc5adaeeaa270914f27a98fab53781dcbeac65e9d51741b5c7\""
	Sep 16 10:38:43 addons-451841 containerd[816]: time="2024-09-16T10:38:43.971884187Z" level=info msg="Container to stop \"45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99\" must be in running or unknown state, current state \"CONTAINER_EXITED\""
	Sep 16 10:38:43 addons-451841 containerd[816]: time="2024-09-16T10:38:43.981458614Z" level=info msg="shim disconnected" id=82bcc82f48993503ebd93392cb512719616d3407ccd539baa6173c4db7a544f7 namespace=k8s.io
	Sep 16 10:38:43 addons-451841 containerd[816]: time="2024-09-16T10:38:43.981658063Z" level=warning msg="cleaning up after shim disconnected" id=82bcc82f48993503ebd93392cb512719616d3407ccd539baa6173c4db7a544f7 namespace=k8s.io
	Sep 16 10:38:43 addons-451841 containerd[816]: time="2024-09-16T10:38:43.981745562Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:38:44 addons-451841 containerd[816]: time="2024-09-16T10:38:44.053475931Z" level=info msg="shim disconnected" id=7b57eb1530381cfc5adaeeaa270914f27a98fab53781dcbeac65e9d51741b5c7 namespace=k8s.io
	Sep 16 10:38:44 addons-451841 containerd[816]: time="2024-09-16T10:38:44.053751449Z" level=warning msg="cleaning up after shim disconnected" id=7b57eb1530381cfc5adaeeaa270914f27a98fab53781dcbeac65e9d51741b5c7 namespace=k8s.io
	Sep 16 10:38:44 addons-451841 containerd[816]: time="2024-09-16T10:38:44.053851494Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:38:44 addons-451841 containerd[816]: time="2024-09-16T10:38:44.056189685Z" level=info msg="TearDown network for sandbox \"82bcc82f48993503ebd93392cb512719616d3407ccd539baa6173c4db7a544f7\" successfully"
	Sep 16 10:38:44 addons-451841 containerd[816]: time="2024-09-16T10:38:44.056342816Z" level=info msg="StopPodSandbox for \"82bcc82f48993503ebd93392cb512719616d3407ccd539baa6173c4db7a544f7\" returns successfully"
	Sep 16 10:38:44 addons-451841 containerd[816]: time="2024-09-16T10:38:44.167632364Z" level=info msg="TearDown network for sandbox \"7b57eb1530381cfc5adaeeaa270914f27a98fab53781dcbeac65e9d51741b5c7\" successfully"
	Sep 16 10:38:44 addons-451841 containerd[816]: time="2024-09-16T10:38:44.167673045Z" level=info msg="StopPodSandbox for \"7b57eb1530381cfc5adaeeaa270914f27a98fab53781dcbeac65e9d51741b5c7\" returns successfully"
	Sep 16 10:38:44 addons-451841 containerd[816]: time="2024-09-16T10:38:44.339330798Z" level=info msg="RemoveContainer for \"45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99\""
	Sep 16 10:38:44 addons-451841 containerd[816]: time="2024-09-16T10:38:44.348919198Z" level=info msg="RemoveContainer for \"45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99\" returns successfully"
	Sep 16 10:38:44 addons-451841 containerd[816]: time="2024-09-16T10:38:44.352719402Z" level=error msg="ContainerStatus for \"45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99\" failed" error="rpc error: code = NotFound desc = an error occurred when try to find container \"45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99\": not found"
	Sep 16 10:38:44 addons-451841 containerd[816]: time="2024-09-16T10:38:44.358888551Z" level=info msg="RemoveContainer for \"d221c581079ff3e0a9a4784669990de31c5f47ed658aad778df8fe0358ae46df\""
	Sep 16 10:38:44 addons-451841 containerd[816]: time="2024-09-16T10:38:44.369755473Z" level=info msg="RemoveContainer for \"d221c581079ff3e0a9a4784669990de31c5f47ed658aad778df8fe0358ae46df\" returns successfully"
	Sep 16 10:38:44 addons-451841 containerd[816]: time="2024-09-16T10:38:44.370594853Z" level=error msg="ContainerStatus for \"d221c581079ff3e0a9a4784669990de31c5f47ed658aad778df8fe0358ae46df\" failed" error="rpc error: code = NotFound desc = an error occurred when try to find container \"d221c581079ff3e0a9a4784669990de31c5f47ed658aad778df8fe0358ae46df\": not found"
	
	
	==> coredns [5232ad6b096cb39cf18a9c11e936d3dae11b081bd6666741f3c42e78161ed09f] <==
	[INFO] 10.244.0.9:45725 - 37874 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000066576s
	[INFO] 10.244.0.9:59523 - 16440 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002202223s
	[INFO] 10.244.0.9:59523 - 22330 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002459412s
	[INFO] 10.244.0.9:50469 - 36811 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000147232s
	[INFO] 10.244.0.9:50469 - 6599 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000154395s
	[INFO] 10.244.0.9:54670 - 20364 "AAAA IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000113739s
	[INFO] 10.244.0.9:54670 - 51376 "A IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000392596s
	[INFO] 10.244.0.9:37135 - 16205 "A IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000064189s
	[INFO] 10.244.0.9:37135 - 64832 "AAAA IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.00005714s
	[INFO] 10.244.0.9:54223 - 7962 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000052168s
	[INFO] 10.244.0.9:54223 - 14360 "AAAA IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000048632s
	[INFO] 10.244.0.9:33840 - 38805 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001404552s
	[INFO] 10.244.0.9:33840 - 4752 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.00164702s
	[INFO] 10.244.0.9:45027 - 58736 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000075766s
	[INFO] 10.244.0.9:45027 - 39026 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000213093s
	[INFO] 10.244.0.24:51483 - 10090 "AAAA IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.000202231s
	[INFO] 10.244.0.24:42195 - 64926 "A IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.000154649s
	[INFO] 10.244.0.24:32892 - 59527 "AAAA IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000163215s
	[INFO] 10.244.0.24:47611 - 11902 "A IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000096418s
	[INFO] 10.244.0.24:59950 - 37722 "AAAA IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.00008466s
	[INFO] 10.244.0.24:52002 - 29131 "A IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.000089403s
	[INFO] 10.244.0.24:38598 - 65011 "AAAA IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.002266241s
	[INFO] 10.244.0.24:60458 - 11928 "A IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.001809759s
	[INFO] 10.244.0.24:43975 - 30277 "A IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 610 0.001862182s
	[INFO] 10.244.0.24:51154 - 58482 "AAAA IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 240 0.002138832s
	
	
	==> describe nodes <==
	Name:               addons-451841
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=addons-451841
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=addons-451841
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_30_46_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	                    topology.hostpath.csi/node=addons-451841
	Annotations:        csi.volume.kubernetes.io/nodeid: {"hostpath.csi.k8s.io":"addons-451841"}
	                    kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:30:42 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  addons-451841
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:38:35 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:33:49 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:33:49 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:33:49 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:33:49 +0000   Mon, 16 Sep 2024 10:30:42 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    addons-451841
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 003b87cb77e5465aa882d8df5f5cd5ab
	  System UUID:                21a29522-aef6-4d70-a29b-0ea27731fdbe
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (24 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     cloud-spanner-emulator-769b77f747-m5wld     0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m52s
	  gadget                      gadget-wjwc2                                0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m49s
	  gcp-auth                    gcp-auth-89d5ffd79-pw58v                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         6m20s
	  ingress-nginx               ingress-nginx-controller-bc57996ff-rqhcp    100m (5%)     0 (0%)      90Mi (1%)        0 (0%)         7m47s
	  kube-system                 coredns-7c65d6cfc9-jqthn                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     7m55s
	  kube-system                 csi-hostpath-attacher-0                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m44s
	  kube-system                 csi-hostpath-resizer-0                      0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m44s
	  kube-system                 csi-hostpathplugin-r28vj                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m44s
	  kube-system                 etcd-addons-451841                          100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         8m
	  kube-system                 kindnet-zckxr                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      7m56s
	  kube-system                 kube-apiserver-addons-451841                250m (12%)    0 (0%)      0 (0%)           0 (0%)         8m
	  kube-system                 kube-controller-manager-addons-451841       200m (10%)    0 (0%)      0 (0%)           0 (0%)         8m1s
	  kube-system                 kube-ingress-dns-minikube                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m51s
	  kube-system                 kube-proxy-tltkn                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m56s
	  kube-system                 kube-scheduler-addons-451841                100m (5%)     0 (0%)      0 (0%)           0 (0%)         8m
	  kube-system                 metrics-server-84c5f94fbc-q47pm             100m (5%)     0 (0%)      200Mi (2%)       0 (0%)         7m50s
	  kube-system                 nvidia-device-plugin-daemonset-l6r5c        0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m53s
	  kube-system                 snapshot-controller-56fcc65765-6llf9        0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m48s
	  kube-system                 snapshot-controller-56fcc65765-qxvll        0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m48s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m50s
	  local-path-storage          local-path-provisioner-86d989889c-qkpm6     0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m50s
	  volcano-system              volcano-admission-77d7d48b68-sjxcs          0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m46s
	  volcano-system              volcano-controllers-56675bb4d5-2ltwp        0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m45s
	  volcano-system              volcano-scheduler-576bc46687-xwjbn          0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m45s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                1050m (52%)  100m (5%)
	  memory             510Mi (6%)   220Mi (2%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-1Gi      0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	  hugepages-32Mi     0 (0%)       0 (0%)
	  hugepages-64Ki     0 (0%)       0 (0%)
	Events:
	  Type     Reason                   Age                  From             Message
	  ----     ------                   ----                 ----             -------
	  Normal   Starting                 7m54s                kube-proxy       
	  Normal   NodeAllocatableEnforced  8m8s                 kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 8m8s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  8m8s (x8 over 8m8s)  kubelet          Node addons-451841 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    8m8s (x7 over 8m8s)  kubelet          Node addons-451841 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     8m8s (x7 over 8m8s)  kubelet          Node addons-451841 status is now: NodeHasSufficientPID
	  Normal   Starting                 8m8s                 kubelet          Starting kubelet.
	  Normal   Starting                 8m                   kubelet          Starting kubelet.
	  Warning  CgroupV1                 8m                   kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  8m                   kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  8m                   kubelet          Node addons-451841 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    8m                   kubelet          Node addons-451841 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     8m                   kubelet          Node addons-451841 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           7m57s                node-controller  Node addons-451841 event: Registered Node addons-451841 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [8769c148a0bb341cc1dcca117d41b6be795d52ed6e49348d14da26aac1d42f01] <==
	{"level":"info","ts":"2024-09-16T10:30:38.620397Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:30:38.620645Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:30:38.620676Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:30:38.620740Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:30:38.620758Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:30:39.574728Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.574952Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.575045Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.575117Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575193Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575240Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575326Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.581756Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:addons-451841 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:30:39.582006Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:30:39.582134Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:30:39.582418Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:30:39.582525Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:30:39.582434Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.583536Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:30:39.584617Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:30:39.585041Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.590779Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.590980Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.591021Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:30:39.592081Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	
	
	==> gcp-auth [f90c8869604c54edfd93d5ef8e6467ed81e6a63fbedf9c5712f155d5d85f40b8] <==
	2024/09/16 10:33:32 GCP Auth Webhook started!
	
	
	==> kernel <==
	 10:38:45 up 1 day, 14:21,  0 users,  load average: 0.52, 0.88, 1.73
	Linux addons-451841 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [64b671b165f6f7bb28b281ddd3fe708221407f35f09389c964253f52887fd626] <==
	I0916 10:36:41.720987       1 main.go:299] handling current node
	I0916 10:36:51.720937       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:36:51.720972       1 main.go:299] handling current node
	I0916 10:37:01.727834       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:01.727867       1 main.go:299] handling current node
	I0916 10:37:11.729432       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:11.729465       1 main.go:299] handling current node
	I0916 10:37:21.720904       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:21.720938       1 main.go:299] handling current node
	I0916 10:37:31.722487       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:31.722584       1 main.go:299] handling current node
	I0916 10:37:41.720891       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:41.720925       1 main.go:299] handling current node
	I0916 10:37:51.720989       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:37:51.721216       1 main.go:299] handling current node
	I0916 10:38:01.725211       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:01.725246       1 main.go:299] handling current node
	I0916 10:38:11.726624       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:11.726661       1 main.go:299] handling current node
	I0916 10:38:21.720890       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:21.720920       1 main.go:299] handling current node
	I0916 10:38:31.726782       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:31.726815       1 main.go:299] handling current node
	I0916 10:38:41.722754       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:38:41.722856       1 main.go:299] handling current node
	
	
	==> kube-apiserver [2870b9699fd97d290c5750a6361bd1eb6ac986ce8fb7e3f9eb6474155c6b1fa8] <==
	W0916 10:32:01.434298       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:02.466465       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:03.481058       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:04.547641       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:05.602664       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.287206       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:06.287255       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:32:06.288974       1 dispatcher.go:225] Failed calling webhook, failing closed mutatepod.volcano.sh: failed calling webhook "mutatepod.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/pods/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.354223       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:06.354265       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:32:06.355906       1 dispatcher.go:225] Failed calling webhook, failing closed mutatepod.volcano.sh: failed calling webhook "mutatepod.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/pods/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.623074       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:07.661520       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:08.760490       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:09.841622       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:10.917089       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:11.983956       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:13.046405       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:14.089526       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:25.289607       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:25.289652       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:33:06.298609       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:33:06.298665       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:33:06.363205       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:33:06.363253       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	
	
	==> kube-controller-manager [31da3c8e5867c3e2a6f4592fba3d201359a6c0c862a2620157496149c91a3b11] <==
	I0916 10:33:06.336091       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:06.351384       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:06.375003       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:06.382571       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:06.391010       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:06.403147       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:07.408707       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:07.423579       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:08.523312       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:08.551171       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:09.531505       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:09.540649       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:09.549169       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="1s"
	I0916 10:33:09.560618       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:09.569582       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:09.577256       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:33.504028       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="13.523761ms"
	I0916 10:33:33.504493       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="52.742µs"
	I0916 10:33:39.026367       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:33:39.035647       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:33:39.082659       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:33:39.087612       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:33:49.934314       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-451841"
	I0916 10:38:37.530598       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="yakd-dashboard/yakd-dashboard-67d98fc6b" duration="21.727µs"
	I0916 10:38:43.718435       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/registry-66c9cd494c" duration="10.191µs"
	
	
	==> kube-proxy [35987f39fe9efffcbcdfe8a1694d2541bd561939f35f2770e06a09f005dcf753] <==
	I0916 10:30:51.148935       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:30:51.266541       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:30:51.266602       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:30:51.307434       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:30:51.307506       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:30:51.310004       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:30:51.310401       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:30:51.310420       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:30:51.312344       1 config.go:199] "Starting service config controller"
	I0916 10:30:51.312371       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:30:51.312398       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:30:51.312403       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:30:51.315085       1 config.go:328] "Starting node config controller"
	I0916 10:30:51.315100       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:30:51.413119       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:30:51.413177       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:30:51.415238       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [808425f96a2291f8e0cf3dfea11339a46bc25f8b4e1f82c29efc8eee8e1d729a] <==
	W0916 10:30:43.815016       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:30:43.815095       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.815504       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.815602       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.815794       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 10:30:43.815882       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.816048       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:30:43.816126       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.816295       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.817022       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817307       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:30:43.817404       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817601       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.817688       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817801       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:30:43.818028       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817989       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:30:43.818395       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.818315       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 10:30:43.818847       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.818381       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 10:30:43.819065       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.819318       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:30:43.819478       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	I0916 10:30:44.999991       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 10:38:37 addons-451841 kubelet[1517]: I0916 10:38:37.856874    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3884eda5-248f-48c2-87cc-ddd844445abb-kube-api-access-pgx97" (OuterVolumeSpecName: "kube-api-access-pgx97") pod "3884eda5-248f-48c2-87cc-ddd844445abb" (UID: "3884eda5-248f-48c2-87cc-ddd844445abb"). InnerVolumeSpecName "kube-api-access-pgx97". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 10:38:37 addons-451841 kubelet[1517]: I0916 10:38:37.956050    1517 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-pgx97\" (UniqueName: \"kubernetes.io/projected/3884eda5-248f-48c2-87cc-ddd844445abb-kube-api-access-pgx97\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:38:38 addons-451841 kubelet[1517]: I0916 10:38:38.295759    1517 scope.go:117] "RemoveContainer" containerID="952cd1e0c1c93af7f504c2b6f14087e5b5fb821f14aa18db0c2be01c3eb5ea16"
	Sep 16 10:38:38 addons-451841 kubelet[1517]: I0916 10:38:38.306628    1517 scope.go:117] "RemoveContainer" containerID="952cd1e0c1c93af7f504c2b6f14087e5b5fb821f14aa18db0c2be01c3eb5ea16"
	Sep 16 10:38:38 addons-451841 kubelet[1517]: E0916 10:38:38.307913    1517 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = an error occurred when try to find container \"952cd1e0c1c93af7f504c2b6f14087e5b5fb821f14aa18db0c2be01c3eb5ea16\": not found" containerID="952cd1e0c1c93af7f504c2b6f14087e5b5fb821f14aa18db0c2be01c3eb5ea16"
	Sep 16 10:38:38 addons-451841 kubelet[1517]: I0916 10:38:38.307946    1517 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"containerd","ID":"952cd1e0c1c93af7f504c2b6f14087e5b5fb821f14aa18db0c2be01c3eb5ea16"} err="failed to get container status \"952cd1e0c1c93af7f504c2b6f14087e5b5fb821f14aa18db0c2be01c3eb5ea16\": rpc error: code = NotFound desc = an error occurred when try to find container \"952cd1e0c1c93af7f504c2b6f14087e5b5fb821f14aa18db0c2be01c3eb5ea16\": not found"
	Sep 16 10:38:39 addons-451841 kubelet[1517]: I0916 10:38:39.525746    1517 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3884eda5-248f-48c2-87cc-ddd844445abb" path="/var/lib/kubelet/pods/3884eda5-248f-48c2-87cc-ddd844445abb/volumes"
	Sep 16 10:38:42 addons-451841 kubelet[1517]: I0916 10:38:42.522360    1517 scope.go:117] "RemoveContainer" containerID="4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c"
	Sep 16 10:38:42 addons-451841 kubelet[1517]: E0916 10:38:42.522547    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:38:44 addons-451841 kubelet[1517]: I0916 10:38:44.106122    1517 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gl759\" (UniqueName: \"kubernetes.io/projected/17af08bf-9965-4bec-8d1b-0c4c37167ac1-kube-api-access-gl759\") pod \"17af08bf-9965-4bec-8d1b-0c4c37167ac1\" (UID: \"17af08bf-9965-4bec-8d1b-0c4c37167ac1\") "
	Sep 16 10:38:44 addons-451841 kubelet[1517]: I0916 10:38:44.108181    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17af08bf-9965-4bec-8d1b-0c4c37167ac1-kube-api-access-gl759" (OuterVolumeSpecName: "kube-api-access-gl759") pod "17af08bf-9965-4bec-8d1b-0c4c37167ac1" (UID: "17af08bf-9965-4bec-8d1b-0c4c37167ac1"). InnerVolumeSpecName "kube-api-access-gl759". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 10:38:44 addons-451841 kubelet[1517]: I0916 10:38:44.207393    1517 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-gl759\" (UniqueName: \"kubernetes.io/projected/17af08bf-9965-4bec-8d1b-0c4c37167ac1-kube-api-access-gl759\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:38:44 addons-451841 kubelet[1517]: I0916 10:38:44.308471    1517 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fx62l\" (UniqueName: \"kubernetes.io/projected/10361d7a-9abb-41e6-88eb-2194d01e1301-kube-api-access-fx62l\") pod \"10361d7a-9abb-41e6-88eb-2194d01e1301\" (UID: \"10361d7a-9abb-41e6-88eb-2194d01e1301\") "
	Sep 16 10:38:44 addons-451841 kubelet[1517]: I0916 10:38:44.312019    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10361d7a-9abb-41e6-88eb-2194d01e1301-kube-api-access-fx62l" (OuterVolumeSpecName: "kube-api-access-fx62l") pod "10361d7a-9abb-41e6-88eb-2194d01e1301" (UID: "10361d7a-9abb-41e6-88eb-2194d01e1301"). InnerVolumeSpecName "kube-api-access-fx62l". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 10:38:44 addons-451841 kubelet[1517]: I0916 10:38:44.334970    1517 scope.go:117] "RemoveContainer" containerID="45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99"
	Sep 16 10:38:44 addons-451841 kubelet[1517]: I0916 10:38:44.352296    1517 scope.go:117] "RemoveContainer" containerID="45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99"
	Sep 16 10:38:44 addons-451841 kubelet[1517]: E0916 10:38:44.353034    1517 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = an error occurred when try to find container \"45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99\": not found" containerID="45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99"
	Sep 16 10:38:44 addons-451841 kubelet[1517]: I0916 10:38:44.353147    1517 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"containerd","ID":"45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99"} err="failed to get container status \"45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99\": rpc error: code = NotFound desc = an error occurred when try to find container \"45dcb038c40ff68181b8af83613f23e1aa4e7a123ce6bf23d96c543b5bf6bc99\": not found"
	Sep 16 10:38:44 addons-451841 kubelet[1517]: I0916 10:38:44.353255    1517 scope.go:117] "RemoveContainer" containerID="d221c581079ff3e0a9a4784669990de31c5f47ed658aad778df8fe0358ae46df"
	Sep 16 10:38:44 addons-451841 kubelet[1517]: I0916 10:38:44.370188    1517 scope.go:117] "RemoveContainer" containerID="d221c581079ff3e0a9a4784669990de31c5f47ed658aad778df8fe0358ae46df"
	Sep 16 10:38:44 addons-451841 kubelet[1517]: E0916 10:38:44.371637    1517 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = an error occurred when try to find container \"d221c581079ff3e0a9a4784669990de31c5f47ed658aad778df8fe0358ae46df\": not found" containerID="d221c581079ff3e0a9a4784669990de31c5f47ed658aad778df8fe0358ae46df"
	Sep 16 10:38:44 addons-451841 kubelet[1517]: I0916 10:38:44.371783    1517 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"containerd","ID":"d221c581079ff3e0a9a4784669990de31c5f47ed658aad778df8fe0358ae46df"} err="failed to get container status \"d221c581079ff3e0a9a4784669990de31c5f47ed658aad778df8fe0358ae46df\": rpc error: code = NotFound desc = an error occurred when try to find container \"d221c581079ff3e0a9a4784669990de31c5f47ed658aad778df8fe0358ae46df\": not found"
	Sep 16 10:38:44 addons-451841 kubelet[1517]: I0916 10:38:44.410405    1517 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-fx62l\" (UniqueName: \"kubernetes.io/projected/10361d7a-9abb-41e6-88eb-2194d01e1301-kube-api-access-fx62l\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:38:45 addons-451841 kubelet[1517]: I0916 10:38:45.531215    1517 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10361d7a-9abb-41e6-88eb-2194d01e1301" path="/var/lib/kubelet/pods/10361d7a-9abb-41e6-88eb-2194d01e1301/volumes"
	Sep 16 10:38:45 addons-451841 kubelet[1517]: I0916 10:38:45.533469    1517 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17af08bf-9965-4bec-8d1b-0c4c37167ac1" path="/var/lib/kubelet/pods/17af08bf-9965-4bec-8d1b-0c4c37167ac1/volumes"
	
	
	==> storage-provisioner [4ddb5fa614111a21d93d580947f3eb3b791d38fa6e497e66ae259ff6bb7fed15] <==
	I0916 10:30:56.265937       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:30:56.289948       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:30:56.290011       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:30:56.319402       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:30:56.319890       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"ef239dcc-ec3a-4a4d-b0db-6d9c8de888a1", APIVersion:"v1", ResourceVersion:"601", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157 became leader
	I0916 10:30:56.319948       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157!
	I0916 10:30:56.520389       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-451841 -n addons-451841
helpers_test.go:261: (dbg) Run:  kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (557.707µs)
helpers_test.go:263: kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestAddons/parallel/Registry (15.67s)

                                                
                                    
x
+
TestAddons/parallel/Ingress (2.38s)

                                                
                                                
=== RUN   TestAddons/parallel/Ingress
=== PAUSE TestAddons/parallel/Ingress

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Ingress
addons_test.go:209: (dbg) Run:  kubectl --context addons-451841 wait --for=condition=ready --namespace=ingress-nginx pod --selector=app.kubernetes.io/component=controller --timeout=90s
addons_test.go:209: (dbg) Non-zero exit: kubectl --context addons-451841 wait --for=condition=ready --namespace=ingress-nginx pod --selector=app.kubernetes.io/component=controller --timeout=90s: fork/exec /usr/local/bin/kubectl: exec format error (606.929µs)
addons_test.go:210: failed waiting for ingress-nginx-controller : fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestAddons/parallel/Ingress]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect addons-451841
helpers_test.go:235: (dbg) docker inspect addons-451841:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4",
	        "Created": "2024-09-16T10:30:19.386072283Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2064804,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:30:19.514500967Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/hostname",
	        "HostsPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/hosts",
	        "LogPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4-json.log",
	        "Name": "/addons-451841",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "addons-451841:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "addons-451841",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/merged",
	                "UpperDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/diff",
	                "WorkDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "addons-451841",
	                "Source": "/var/lib/docker/volumes/addons-451841/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "addons-451841",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "addons-451841",
	                "name.minikube.sigs.k8s.io": "addons-451841",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "4da7b9dd4db914ae48304dba9ae2b2fb9dab68040bc986bf2751a778e62e4524",
	            "SandboxKey": "/var/run/docker/netns/4da7b9dd4db9",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40577"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40578"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40581"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40579"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40580"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "addons-451841": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "cd1315f485e3267c82ac80908081e901323e720ef1bb26de92d612c54dfd58d8",
	                    "EndpointID": "36f212e2a713c67d6c2ea54e50fbd0d8d7f7eb862ef913caa03a6cbfac71cb21",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "addons-451841",
	                        "8a213d4c4dec"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p addons-451841 -n addons-451841
helpers_test.go:244: <<< TestAddons/parallel/Ingress FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestAddons/parallel/Ingress]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p addons-451841 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p addons-451841 logs -n 25: (1.501191671s)
helpers_test.go:252: TestAddons/parallel/Ingress logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| Command |                 Args                 |        Profile         |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:28 UTC |                     |
	|         | -p download-only-911311              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0         |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:28 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-911311              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | -o=json --download-only              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | -p download-only-889126              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.31.1         |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-889126              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-911311              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-889126              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | --download-only -p                   | download-docker-956530 | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | download-docker-956530               |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | -p download-docker-956530            | download-docker-956530 | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | --download-only -p                   | binary-mirror-852743   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | binary-mirror-852743                 |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --binary-mirror                      |                        |         |         |                     |                     |
	|         | http://127.0.0.1:35351               |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | -p binary-mirror-852743              | binary-mirror-852743   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| addons  | disable dashboard -p                 | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| addons  | enable dashboard -p                  | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| start   | -p addons-451841 --wait=true         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:33 UTC |
	|         | --memory=4000 --alsologtostderr      |                        |         |         |                     |                     |
	|         | --addons=registry                    |                        |         |         |                     |                     |
	|         | --addons=metrics-server              |                        |         |         |                     |                     |
	|         | --addons=volumesnapshots             |                        |         |         |                     |                     |
	|         | --addons=csi-hostpath-driver         |                        |         |         |                     |                     |
	|         | --addons=gcp-auth                    |                        |         |         |                     |                     |
	|         | --addons=cloud-spanner               |                        |         |         |                     |                     |
	|         | --addons=inspektor-gadget            |                        |         |         |                     |                     |
	|         | --addons=storage-provisioner-rancher |                        |         |         |                     |                     |
	|         | --addons=nvidia-device-plugin        |                        |         |         |                     |                     |
	|         | --addons=yakd --addons=volcano       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --addons=ingress                     |                        |         |         |                     |                     |
	|         | --addons=ingress-dns                 |                        |         |         |                     |                     |
	| addons  | addons-451841 addons disable         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | yakd --alsologtostderr -v=1          |                        |         |         |                     |                     |
	| ip      | addons-451841 ip                     | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	| addons  | addons-451841 addons disable         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | registry --alsologtostderr           |                        |         |         |                     |                     |
	|         | -v=1                                 |                        |         |         |                     |                     |
	| addons  | disable nvidia-device-plugin         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | -p addons-451841                     |                        |         |         |                     |                     |
	| addons  | disable cloud-spanner -p             | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| addons  | enable headlamp                      | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | -p addons-451841                     |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1               |                        |         |         |                     |                     |
	| addons  | addons-451841 addons disable         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:39 UTC | 16 Sep 24 10:39 UTC |
	|         | headlamp --alsologtostderr           |                        |         |         |                     |                     |
	|         | -v=1                                 |                        |         |         |                     |                     |
	| addons  | disable inspektor-gadget -p          | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:44 UTC | 16 Sep 24 10:44 UTC |
	|         | addons-451841                        |                        |         |         |                     |                     |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:29:55
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:29:55.756900 2064308 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:29:55.757118 2064308 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:29:55.757146 2064308 out.go:358] Setting ErrFile to fd 2...
	I0916 10:29:55.757164 2064308 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:29:55.757443 2064308 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:29:55.757918 2064308 out.go:352] Setting JSON to false
	I0916 10:29:55.758950 2064308 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":137538,"bootTime":1726345058,"procs":192,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:29:55.759050 2064308 start.go:139] virtualization:  
	I0916 10:29:55.762450 2064308 out.go:177] * [addons-451841] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:29:55.765218 2064308 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:29:55.765320 2064308 notify.go:220] Checking for updates...
	I0916 10:29:55.771607 2064308 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:29:55.774426 2064308 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:29:55.777761 2064308 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:29:55.780330 2064308 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:29:55.782904 2064308 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:29:55.785688 2064308 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:29:55.807382 2064308 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:29:55.807515 2064308 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:29:55.863178 2064308 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:49 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:29:55.853088898 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:29:55.863303 2064308 docker.go:318] overlay module found
	I0916 10:29:55.867792 2064308 out.go:177] * Using the docker driver based on user configuration
	I0916 10:29:55.870461 2064308 start.go:297] selected driver: docker
	I0916 10:29:55.870476 2064308 start.go:901] validating driver "docker" against <nil>
	I0916 10:29:55.870490 2064308 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:29:55.871367 2064308 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:29:55.922454 2064308 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:49 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:29:55.912678011 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:29:55.922666 2064308 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:29:55.922995 2064308 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:29:55.925501 2064308 out.go:177] * Using Docker driver with root privileges
	I0916 10:29:55.928402 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:29:55.928468 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:29:55.928481 2064308 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:29:55.928561 2064308 start.go:340] cluster config:
	{Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime
:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHA
uthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:29:55.931349 2064308 out.go:177] * Starting "addons-451841" primary control-plane node in "addons-451841" cluster
	I0916 10:29:55.933847 2064308 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:29:55.936549 2064308 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:29:55.939027 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:29:55.939075 2064308 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:29:55.939087 2064308 cache.go:56] Caching tarball of preloaded images
	I0916 10:29:55.939127 2064308 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:29:55.939172 2064308 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:29:55.939183 2064308 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:29:55.939554 2064308 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json ...
	I0916 10:29:55.939585 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json: {Name:mk4b86ccd0e04a15f77246bcc432382e6ef83bd3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:29:55.955829 2064308 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:29:55.955957 2064308 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:29:55.955999 2064308 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:29:55.956009 2064308 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:29:55.956017 2064308 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:29:55.956025 2064308 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:30:13.033213 2064308 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:30:13.033255 2064308 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:30:13.033286 2064308 start.go:360] acquireMachinesLock for addons-451841: {Name:mk3e70771a060125a26a792bbbf3ad5672ad97bd Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:30:13.033421 2064308 start.go:364] duration metric: took 111.614µs to acquireMachinesLock for "addons-451841"
	I0916 10:30:13.033454 2064308 start.go:93] Provisioning new machine with config: &{Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:min
ikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:fa
lse CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:30:13.033622 2064308 start.go:125] createHost starting for "" (driver="docker")
	I0916 10:30:13.035916 2064308 out.go:235] * Creating docker container (CPUs=2, Memory=4000MB) ...
	I0916 10:30:13.036188 2064308 start.go:159] libmachine.API.Create for "addons-451841" (driver="docker")
	I0916 10:30:13.036228 2064308 client.go:168] LocalClient.Create starting
	I0916 10:30:13.036363 2064308 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 10:30:13.386329 2064308 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 10:30:13.561829 2064308 cli_runner.go:164] Run: docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 10:30:13.576129 2064308 cli_runner.go:211] docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 10:30:13.576212 2064308 network_create.go:284] running [docker network inspect addons-451841] to gather additional debugging logs...
	I0916 10:30:13.576235 2064308 cli_runner.go:164] Run: docker network inspect addons-451841
	W0916 10:30:13.591552 2064308 cli_runner.go:211] docker network inspect addons-451841 returned with exit code 1
	I0916 10:30:13.591606 2064308 network_create.go:287] error running [docker network inspect addons-451841]: docker network inspect addons-451841: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network addons-451841 not found
	I0916 10:30:13.591621 2064308 network_create.go:289] output of [docker network inspect addons-451841]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network addons-451841 not found
	
	** /stderr **
	I0916 10:30:13.591720 2064308 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:30:13.608306 2064308 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001aeacb0}
	I0916 10:30:13.608356 2064308 network_create.go:124] attempt to create docker network addons-451841 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0916 10:30:13.608420 2064308 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-451841 addons-451841
	I0916 10:30:13.683378 2064308 network_create.go:108] docker network addons-451841 192.168.49.0/24 created
	I0916 10:30:13.683411 2064308 kic.go:121] calculated static IP "192.168.49.2" for the "addons-451841" container
	I0916 10:30:13.683492 2064308 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:30:13.700184 2064308 cli_runner.go:164] Run: docker volume create addons-451841 --label name.minikube.sigs.k8s.io=addons-451841 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:30:13.718068 2064308 oci.go:103] Successfully created a docker volume addons-451841
	I0916 10:30:13.718179 2064308 cli_runner.go:164] Run: docker run --rm --name addons-451841-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --entrypoint /usr/bin/test -v addons-451841:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:30:15.421383 2064308 cli_runner.go:217] Completed: docker run --rm --name addons-451841-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --entrypoint /usr/bin/test -v addons-451841:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib: (1.703150378s)
	I0916 10:30:15.421417 2064308 oci.go:107] Successfully prepared a docker volume addons-451841
	I0916 10:30:15.421439 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:30:15.421458 2064308 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:30:15.421522 2064308 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-451841:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:30:19.320511 2064308 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-451841:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (3.898937148s)
	I0916 10:30:19.320548 2064308 kic.go:203] duration metric: took 3.899086612s to extract preloaded images to volume ...
	W0916 10:30:19.320695 2064308 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:30:19.320803 2064308 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:30:19.371670 2064308 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-451841 --name addons-451841 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-451841 --network addons-451841 --ip 192.168.49.2 --volume addons-451841:/var --security-opt apparmor=unconfined --memory=4000mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:30:19.674459 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Running}}
	I0916 10:30:19.700795 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:19.725169 2064308 cli_runner.go:164] Run: docker exec addons-451841 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:30:19.777409 2064308 oci.go:144] the created container "addons-451841" has a running status.
	I0916 10:30:19.777438 2064308 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa...
	I0916 10:30:20.426549 2064308 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:30:20.459111 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:20.485764 2064308 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:30:20.485788 2064308 kic_runner.go:114] Args: [docker exec --privileged addons-451841 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:30:20.553044 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:20.584488 2064308 machine.go:93] provisionDockerMachine start ...
	I0916 10:30:20.584585 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.604705 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.605002 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.605024 2064308 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:30:20.750295 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-451841
	
	I0916 10:30:20.750323 2064308 ubuntu.go:169] provisioning hostname "addons-451841"
	I0916 10:30:20.750394 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.772671 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.772910 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.772922 2064308 main.go:141] libmachine: About to run SSH command:
	sudo hostname addons-451841 && echo "addons-451841" | sudo tee /etc/hostname
	I0916 10:30:20.923316 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-451841
	
	I0916 10:30:20.923448 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.940021 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.940274 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.940298 2064308 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\saddons-451841' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-451841/g' /etc/hosts;
				else 
					echo '127.0.1.1 addons-451841' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:30:21.087110 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:30:21.087184 2064308 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:30:21.087263 2064308 ubuntu.go:177] setting up certificates
	I0916 10:30:21.087293 2064308 provision.go:84] configureAuth start
	I0916 10:30:21.087450 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.105254 2064308 provision.go:143] copyHostCerts
	I0916 10:30:21.105342 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:30:21.105468 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:30:21.105537 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:30:21.105585 2064308 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.addons-451841 san=[127.0.0.1 192.168.49.2 addons-451841 localhost minikube]
	I0916 10:30:21.497343 2064308 provision.go:177] copyRemoteCerts
	I0916 10:30:21.497413 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:30:21.497456 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.514957 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.611658 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:30:21.636890 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:30:21.662172 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:30:21.686808 2064308 provision.go:87] duration metric: took 599.477164ms to configureAuth
	I0916 10:30:21.686873 2064308 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:30:21.687116 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:21.687133 2064308 machine.go:96] duration metric: took 1.102625588s to provisionDockerMachine
	I0916 10:30:21.687141 2064308 client.go:171] duration metric: took 8.650903893s to LocalClient.Create
	I0916 10:30:21.687161 2064308 start.go:167] duration metric: took 8.650974974s to libmachine.API.Create "addons-451841"
	I0916 10:30:21.687171 2064308 start.go:293] postStartSetup for "addons-451841" (driver="docker")
	I0916 10:30:21.687182 2064308 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:30:21.687249 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:30:21.687299 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.706431 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.804065 2064308 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:30:21.807409 2064308 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:30:21.807450 2064308 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:30:21.807462 2064308 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:30:21.807470 2064308 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:30:21.807482 2064308 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:30:21.807551 2064308 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:30:21.807581 2064308 start.go:296] duration metric: took 120.403063ms for postStartSetup
	I0916 10:30:21.807904 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.824820 2064308 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json ...
	I0916 10:30:21.825120 2064308 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:30:21.825171 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.841557 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.935711 2064308 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:30:21.940289 2064308 start.go:128] duration metric: took 8.906649729s to createHost
	I0916 10:30:21.940328 2064308 start.go:83] releasing machines lock for "addons-451841", held for 8.906892895s
	I0916 10:30:21.940401 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.957512 2064308 ssh_runner.go:195] Run: cat /version.json
	I0916 10:30:21.957582 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.957842 2064308 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:30:21.957901 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.986070 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.992358 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:22.209233 2064308 ssh_runner.go:195] Run: systemctl --version
	I0916 10:30:22.213896 2064308 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:30:22.218111 2064308 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:30:22.243931 2064308 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:30:22.244032 2064308 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:30:22.274074 2064308 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:30:22.274104 2064308 start.go:495] detecting cgroup driver to use...
	I0916 10:30:22.274139 2064308 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:30:22.274194 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:30:22.287113 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:30:22.299302 2064308 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:30:22.299412 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:30:22.313515 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:30:22.327839 2064308 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:30:22.409410 2064308 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:30:22.494962 2064308 docker.go:233] disabling docker service ...
	I0916 10:30:22.495100 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:30:22.515205 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:30:22.527495 2064308 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:30:22.611444 2064308 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:30:22.705471 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:30:22.717496 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:30:22.735435 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:30:22.746124 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:30:22.757226 2064308 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:30:22.757299 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:30:22.767541 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:30:22.779039 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:30:22.788821 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:30:22.799244 2064308 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:30:22.808704 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:30:22.820713 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:30:22.831851 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:30:22.842394 2064308 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:30:22.851545 2064308 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:30:22.860424 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:22.961475 2064308 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:30:23.100987 2064308 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:30:23.101138 2064308 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:30:23.105001 2064308 start.go:563] Will wait 60s for crictl version
	I0916 10:30:23.105079 2064308 ssh_runner.go:195] Run: which crictl
	I0916 10:30:23.108696 2064308 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:30:23.154724 2064308 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:30:23.154812 2064308 ssh_runner.go:195] Run: containerd --version
	I0916 10:30:23.179902 2064308 ssh_runner.go:195] Run: containerd --version
	I0916 10:30:23.208730 2064308 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:30:23.210246 2064308 cli_runner.go:164] Run: docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:30:23.225302 2064308 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:30:23.229071 2064308 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:30:23.240048 2064308 kubeadm.go:883] updating cluster {Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNa
mes:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false Cus
tomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:30:23.240172 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:30:23.240246 2064308 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:30:23.276242 2064308 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:30:23.276266 2064308 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:30:23.276331 2064308 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:30:23.312895 2064308 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:30:23.312924 2064308 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:30:23.312933 2064308 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 containerd true true} ...
	I0916 10:30:23.313028 2064308 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=addons-451841 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:30:23.313095 2064308 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:30:23.348552 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:30:23.348577 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:30:23.348587 2064308 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:30:23.348609 2064308 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-451841 NodeName:addons-451841 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc
/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:30:23.348742 2064308 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "addons-451841"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:30:23.348817 2064308 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:30:23.357634 2064308 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:30:23.357705 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:30:23.366468 2064308 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:30:23.385942 2064308 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:30:23.404422 2064308 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2167 bytes)
	I0916 10:30:23.422831 2064308 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:30:23.426382 2064308 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:30:23.437337 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:23.533359 2064308 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:30:23.547523 2064308 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841 for IP: 192.168.49.2
	I0916 10:30:23.547546 2064308 certs.go:194] generating shared ca certs ...
	I0916 10:30:23.547562 2064308 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:23.548238 2064308 certs.go:240] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:30:24.056004 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt ...
	I0916 10:30:24.056043 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt: {Name:mk8fa0c4ced40ca68ac874100ce374f588dfea0b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.056261 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key ...
	I0916 10:30:24.056276 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key: {Name:mk04aab579c9f6bfd22c8de7442d64e7264cf4f3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.056381 2064308 certs.go:240] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:30:24.923761 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt ...
	I0916 10:30:24.923793 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt: {Name:mke93617c0d085600c816f9e0c290a24fbe662eb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.923996 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key ...
	I0916 10:30:24.924009 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key: {Name:mk45200538cf11f718e98e7cfef8cbfcd0dafedf Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.924099 2064308 certs.go:256] generating profile certs ...
	I0916 10:30:24.924161 2064308 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key
	I0916 10:30:24.924189 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt with IP's: []
	I0916 10:30:25.053524 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt ...
	I0916 10:30:25.053557 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: {Name:mk37fa0b7d204f82c8af039a0f580deae8708ef5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.053750 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key ...
	I0916 10:30:25.053764 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key: {Name:mkdb13343be22c0a0f72ff55f3a3cbca00768e68 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.053853 2064308 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707
	I0916 10:30:25.053877 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2]
	I0916 10:30:25.726904 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 ...
	I0916 10:30:25.726937 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707: {Name:mkf1dd897eefb9f7916ec8408e62b2271e638207 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.727141 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707 ...
	I0916 10:30:25.727156 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707: {Name:mkfbc7b493bc2e7d0b9e7f941111c820f07e3e82 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.727261 2064308 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt
	I0916 10:30:25.727361 2064308 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key
	I0916 10:30:25.727418 2064308 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key
	I0916 10:30:25.727439 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt with IP's: []
	I0916 10:30:26.011801 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt ...
	I0916 10:30:26.011842 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt: {Name:mkb21e9e32e986ac8dbc5fbe6c0db427fdb116ee Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:26.012049 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key ...
	I0916 10:30:26.012065 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key: {Name:mk95b366411d26459b0f1e143cac6384a51d5dfb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:26.012320 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:30:26.012368 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:30:26.012401 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:30:26.012429 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:30:26.013083 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:30:26.039152 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:30:26.064366 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:30:26.093086 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:30:26.116868 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
	I0916 10:30:26.141663 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:30:26.166725 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:30:26.191142 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:30:26.214975 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:30:26.238979 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:30:26.256459 2064308 ssh_runner.go:195] Run: openssl version
	I0916 10:30:26.262089 2064308 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:30:26.271478 2064308 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.274966 2064308 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.275035 2064308 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.281888 2064308 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:30:26.291290 2064308 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:30:26.294471 2064308 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:30:26.294534 2064308 kubeadm.go:392] StartCluster: {Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames
:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false Custom
QemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:30:26.294629 2064308 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:30:26.294715 2064308 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:30:26.332648 2064308 cri.go:89] found id: ""
	I0916 10:30:26.332740 2064308 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:30:26.341585 2064308 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:30:26.350524 2064308 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 10:30:26.350588 2064308 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:30:26.359218 2064308 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 10:30:26.359240 2064308 kubeadm.go:157] found existing configuration files:
	
	I0916 10:30:26.359319 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 10:30:26.368227 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 10:30:26.368297 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 10:30:26.377781 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 10:30:26.386494 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 10:30:26.386567 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 10:30:26.394932 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 10:30:26.403622 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 10:30:26.403687 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:30:26.412005 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 10:30:26.420862 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 10:30:26.420957 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:30:26.429543 2064308 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 10:30:26.471767 2064308 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 10:30:26.472019 2064308 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 10:30:26.498827 2064308 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 10:30:26.498904 2064308 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 10:30:26.498947 2064308 kubeadm.go:310] OS: Linux
	I0916 10:30:26.498998 2064308 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 10:30:26.499052 2064308 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 10:30:26.499103 2064308 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 10:30:26.499154 2064308 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 10:30:26.499218 2064308 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 10:30:26.499270 2064308 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 10:30:26.499320 2064308 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 10:30:26.499375 2064308 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 10:30:26.499426 2064308 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 10:30:26.577650 2064308 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 10:30:26.577762 2064308 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 10:30:26.577859 2064308 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 10:30:26.583045 2064308 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 10:30:26.586527 2064308 out.go:235]   - Generating certificates and keys ...
	I0916 10:30:26.586988 2064308 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 10:30:26.587103 2064308 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 10:30:26.754645 2064308 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 10:30:27.554793 2064308 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 10:30:28.039725 2064308 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 10:30:28.690015 2064308 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 10:30:29.764620 2064308 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 10:30:29.764907 2064308 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [addons-451841 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:30:30.341274 2064308 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 10:30:30.342274 2064308 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [addons-451841 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:30:30.576739 2064308 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 10:30:31.765912 2064308 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 10:30:33.601844 2064308 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 10:30:33.602129 2064308 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 10:30:34.584274 2064308 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 10:30:35.213888 2064308 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 10:30:35.990415 2064308 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 10:30:36.165269 2064308 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 10:30:36.564139 2064308 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 10:30:36.565009 2064308 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 10:30:36.568128 2064308 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 10:30:36.570826 2064308 out.go:235]   - Booting up control plane ...
	I0916 10:30:36.570944 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 10:30:36.571026 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 10:30:36.571834 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 10:30:36.583080 2064308 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 10:30:36.589082 2064308 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 10:30:36.589162 2064308 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 10:30:36.685676 2064308 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 10:30:36.685796 2064308 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 10:30:37.686643 2064308 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00127007s
	I0916 10:30:37.686760 2064308 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 10:30:44.689772 2064308 kubeadm.go:310] [api-check] The API server is healthy after 7.003101119s
	I0916 10:30:44.709044 2064308 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 10:30:44.727931 2064308 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 10:30:44.754458 2064308 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 10:30:44.754737 2064308 kubeadm.go:310] [mark-control-plane] Marking the node addons-451841 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 10:30:44.766739 2064308 kubeadm.go:310] [bootstrap-token] Using token: dx9pov.rexyyitopznv0w4v
	I0916 10:30:44.769416 2064308 out.go:235]   - Configuring RBAC rules ...
	I0916 10:30:44.769548 2064308 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 10:30:44.776785 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 10:30:44.785617 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 10:30:44.789704 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 10:30:44.794016 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 10:30:44.798127 2064308 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 10:30:45.099673 2064308 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 10:30:45.534575 2064308 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 10:30:46.098271 2064308 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 10:30:46.099422 2064308 kubeadm.go:310] 
	I0916 10:30:46.099510 2064308 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 10:30:46.099519 2064308 kubeadm.go:310] 
	I0916 10:30:46.099624 2064308 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 10:30:46.099640 2064308 kubeadm.go:310] 
	I0916 10:30:46.099673 2064308 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 10:30:46.099733 2064308 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 10:30:46.099783 2064308 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 10:30:46.099787 2064308 kubeadm.go:310] 
	I0916 10:30:46.099841 2064308 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 10:30:46.099846 2064308 kubeadm.go:310] 
	I0916 10:30:46.099898 2064308 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 10:30:46.099903 2064308 kubeadm.go:310] 
	I0916 10:30:46.099959 2064308 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 10:30:46.100036 2064308 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 10:30:46.100108 2064308 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 10:30:46.100113 2064308 kubeadm.go:310] 
	I0916 10:30:46.100201 2064308 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 10:30:46.100280 2064308 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 10:30:46.100285 2064308 kubeadm.go:310] 
	I0916 10:30:46.100377 2064308 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token dx9pov.rexyyitopznv0w4v \
	I0916 10:30:46.100482 2064308 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 10:30:46.100503 2064308 kubeadm.go:310] 	--control-plane 
	I0916 10:30:46.100507 2064308 kubeadm.go:310] 
	I0916 10:30:46.100599 2064308 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 10:30:46.100604 2064308 kubeadm.go:310] 
	I0916 10:30:46.100684 2064308 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token dx9pov.rexyyitopznv0w4v \
	I0916 10:30:46.100792 2064308 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 10:30:46.104209 2064308 kubeadm.go:310] W0916 10:30:26.468492    1025 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:30:46.104508 2064308 kubeadm.go:310] W0916 10:30:26.469422    1025 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:30:46.104733 2064308 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 10:30:46.104841 2064308 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 10:30:46.104863 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:30:46.104872 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:30:46.107753 2064308 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:30:46.110419 2064308 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:30:46.114304 2064308 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:30:46.114327 2064308 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:30:46.132060 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 10:30:46.405649 2064308 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:30:46.405772 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:46.405844 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-451841 minikube.k8s.io/updated_at=2024_09_16T10_30_46_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=addons-451841 minikube.k8s.io/primary=true
	I0916 10:30:46.544610 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:46.544668 2064308 ops.go:34] apiserver oom_adj: -16
	I0916 10:30:47.045343 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:47.544713 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:48.045593 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:48.545262 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:49.044804 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:49.545373 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:50.045197 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:50.206616 2064308 kubeadm.go:1113] duration metric: took 3.800886781s to wait for elevateKubeSystemPrivileges
	I0916 10:30:50.206650 2064308 kubeadm.go:394] duration metric: took 23.912135022s to StartCluster
	I0916 10:30:50.206760 2064308 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:50.206888 2064308 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:30:50.207291 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:50.207495 2064308 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:30:50.207664 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 10:30:50.207912 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:50.207954 2064308 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
	I0916 10:30:50.208037 2064308 addons.go:69] Setting yakd=true in profile "addons-451841"
	I0916 10:30:50.208056 2064308 addons.go:234] Setting addon yakd=true in "addons-451841"
	I0916 10:30:50.208079 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.208590 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.209289 2064308 addons.go:69] Setting metrics-server=true in profile "addons-451841"
	I0916 10:30:50.209312 2064308 addons.go:234] Setting addon metrics-server=true in "addons-451841"
	I0916 10:30:50.209362 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.209903 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.210207 2064308 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-451841"
	I0916 10:30:50.210240 2064308 addons.go:234] Setting addon nvidia-device-plugin=true in "addons-451841"
	I0916 10:30:50.210263 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.210767 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.218758 2064308 addons.go:69] Setting registry=true in profile "addons-451841"
	I0916 10:30:50.218798 2064308 addons.go:234] Setting addon registry=true in "addons-451841"
	I0916 10:30:50.218832 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.219427 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.219602 2064308 addons.go:69] Setting cloud-spanner=true in profile "addons-451841"
	I0916 10:30:50.219647 2064308 addons.go:234] Setting addon cloud-spanner=true in "addons-451841"
	I0916 10:30:50.219685 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.221722 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.222266 2064308 addons.go:69] Setting storage-provisioner=true in profile "addons-451841"
	I0916 10:30:50.222288 2064308 addons.go:234] Setting addon storage-provisioner=true in "addons-451841"
	I0916 10:30:50.222314 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.222854 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.243982 2064308 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-451841"
	I0916 10:30:50.244056 2064308 addons.go:234] Setting addon csi-hostpath-driver=true in "addons-451841"
	I0916 10:30:50.244103 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.244878 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.250996 2064308 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-451841"
	I0916 10:30:50.251033 2064308 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-451841"
	I0916 10:30:50.251403 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.262479 2064308 addons.go:69] Setting volcano=true in profile "addons-451841"
	I0916 10:30:50.262526 2064308 addons.go:234] Setting addon volcano=true in "addons-451841"
	I0916 10:30:50.262567 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.263124 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.310432 2064308 addons.go:69] Setting default-storageclass=true in profile "addons-451841"
	I0916 10:30:50.310537 2064308 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-451841"
	I0916 10:30:50.311117 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.312245 2064308 addons.go:69] Setting volumesnapshots=true in profile "addons-451841"
	I0916 10:30:50.312377 2064308 addons.go:234] Setting addon volumesnapshots=true in "addons-451841"
	I0916 10:30:50.312448 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.313757 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.327865 2064308 addons.go:69] Setting gcp-auth=true in profile "addons-451841"
	I0916 10:30:50.327962 2064308 mustload.go:65] Loading cluster: addons-451841
	I0916 10:30:50.330380 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:50.330866 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.331146 2064308 out.go:177] * Verifying Kubernetes components...
	I0916 10:30:50.334941 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:50.349812 2064308 addons.go:69] Setting ingress=true in profile "addons-451841"
	I0916 10:30:50.349850 2064308 addons.go:234] Setting addon ingress=true in "addons-451841"
	I0916 10:30:50.349897 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.350438 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.350648 2064308 addons.go:234] Setting addon storage-provisioner-rancher=true in "addons-451841"
	I0916 10:30:50.350723 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.351151 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.369853 2064308 addons.go:69] Setting ingress-dns=true in profile "addons-451841"
	I0916 10:30:50.369893 2064308 addons.go:234] Setting addon ingress-dns=true in "addons-451841"
	I0916 10:30:50.369937 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.370407 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.375867 2064308 out.go:177]   - Using image docker.io/registry:2.8.3
	I0916 10:30:50.382808 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.6
	I0916 10:30:50.384082 2064308 addons.go:69] Setting inspektor-gadget=true in profile "addons-451841"
	I0916 10:30:50.384111 2064308 addons.go:234] Setting addon inspektor-gadget=true in "addons-451841"
	I0916 10:30:50.384143 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.384714 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.421644 2064308 out.go:177]   - Using image registry.k8s.io/metrics-server/metrics-server:v0.7.2
	I0916 10:30:50.424401 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0916 10:30:50.424443 2064308 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0916 10:30:50.424517 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.438309 2064308 out.go:177]   - Using image nvcr.io/nvidia/k8s-device-plugin:v0.16.2
	I0916 10:30:50.438567 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-rc.yaml
	I0916 10:30:50.438585 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (860 bytes)
	I0916 10:30:50.438646 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.439842 2064308 out.go:177]   - Using image docker.io/marcnuri/yakd:0.0.5
	I0916 10:30:50.440236 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-webhook-manager:v1.9.0
	I0916 10:30:50.440402 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.462370 2064308 addons.go:234] Setting addon default-storageclass=true in "addons-451841"
	I0916 10:30:50.462409 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.463191 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.463463 2064308 addons.go:431] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:30:50.466889 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
	I0916 10:30:50.467021 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.474834 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-ns.yaml
	I0916 10:30:50.474857 2064308 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
	I0916 10:30:50.474919 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.484574 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:30:50.485713 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
	I0916 10:30:50.508488 2064308 out.go:177]   - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.23
	I0916 10:30:50.525937 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-controller-manager:v1.9.0
	I0916 10:30:50.526169 2064308 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:30:50.526183 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:30:50.526247 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.542222 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
	I0916 10:30:50.543269 2064308 addons.go:431] installing /etc/kubernetes/addons/deployment.yaml
	I0916 10:30:50.543418 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
	I0916 10:30:50.543483 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.563839 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-scheduler:v1.9.0
	I0916 10:30:50.567954 2064308 addons.go:431] installing /etc/kubernetes/addons/volcano-deployment.yaml
	I0916 10:30:50.567983 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volcano-deployment.yaml (434001 bytes)
	I0916 10:30:50.568053 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.583888 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.587279 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/minikube-ingress-dns:0.0.3
	I0916 10:30:50.587486 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/controller:v1.11.2
	I0916 10:30:50.589757 2064308 out.go:177]   - Using image docker.io/busybox:stable
	I0916 10:30:50.589894 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
	I0916 10:30:50.592333 2064308 addons.go:431] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:30:50.592357 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2442 bytes)
	I0916 10:30:50.592588 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.594469 2064308 out.go:177]   - Using image docker.io/rancher/local-path-provisioner:v0.0.22
	I0916 10:30:50.594639 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:30:50.596571 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
	I0916 10:30:50.596784 2064308 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:30:50.596798 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
	I0916 10:30:50.596863 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.628847 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:30:50.631659 2064308 addons.go:431] installing /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:30:50.631684 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
	I0916 10:30:50.631748 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.645470 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.650239 2064308 out.go:177]   - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0
	I0916 10:30:50.650364 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
	I0916 10:30:50.650401 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
	I0916 10:30:50.652227 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.653039 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-namespace.yaml
	I0916 10:30:50.654718 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-namespace.yaml --> /etc/kubernetes/addons/ig-namespace.yaml (55 bytes)
	I0916 10:30:50.654790 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.661463 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
	I0916 10:30:50.661708 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.654527 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
	I0916 10:30:50.662209 2064308 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
	I0916 10:30:50.662349 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.674173 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
	I0916 10:30:50.676994 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
	I0916 10:30:50.680275 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
	I0916 10:30:50.680305 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
	I0916 10:30:50.680378 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.691037 2064308 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:30:50.691057 2064308 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:30:50.691123 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.774282 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.780046 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.807312 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.827826 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.831006 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.853363 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.867169 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.875051 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.875081 2064308 retry.go:31] will retry after 209.079202ms: ssh: handshake failed: EOF
	I0916 10:30:50.875514 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.878034 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.878076 2064308 retry.go:31] will retry after 358.329045ms: ssh: handshake failed: EOF
	I0916 10:30:50.878970 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.891671 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.913115 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.913144 2064308 retry.go:31] will retry after 291.220359ms: ssh: handshake failed: EOF
	W0916 10:30:51.085514 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:51.085558 2064308 retry.go:31] will retry after 406.090408ms: ssh: handshake failed: EOF
	I0916 10:30:51.380959 2064308 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml": (1.173254923s)
	I0916 10:30:51.381043 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:30:51.381158 2064308 ssh_runner.go:235] Completed: sudo systemctl daemon-reload: (1.046146925s)
	I0916 10:30:51.381191 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 10:30:51.381193 2064308 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:30:51.393457 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-serviceaccount.yaml
	I0916 10:30:51.393478 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-serviceaccount.yaml --> /etc/kubernetes/addons/ig-serviceaccount.yaml (80 bytes)
	I0916 10:30:51.405074 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:30:51.536141 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:30:51.553523 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-hostpath.yaml
	I0916 10:30:51.553553 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
	I0916 10:30:51.664299 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-role.yaml
	I0916 10:30:51.664331 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-role.yaml --> /etc/kubernetes/addons/ig-role.yaml (210 bytes)
	I0916 10:30:51.694553 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-sa.yaml
	I0916 10:30:51.694580 2064308 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
	I0916 10:30:51.695380 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:30:51.703369 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-svc.yaml
	I0916 10:30:51.703394 2064308 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
	I0916 10:30:51.711436 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0916 10:30:51.711460 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
	I0916 10:30:51.716209 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml
	I0916 10:30:51.833745 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
	I0916 10:30:51.872114 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0916 10:30:51.872156 2064308 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0916 10:30:51.879573 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
	I0916 10:30:51.879603 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
	I0916 10:30:51.894115 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:30:51.927534 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-rolebinding.yaml
	I0916 10:30:51.927573 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-rolebinding.yaml --> /etc/kubernetes/addons/ig-rolebinding.yaml (244 bytes)
	I0916 10:30:51.967967 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-crb.yaml
	I0916 10:30:51.967997 2064308 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
	I0916 10:30:51.987647 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:30:51.987672 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
	I0916 10:30:52.018799 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
	I0916 10:30:52.018835 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
	I0916 10:30:52.040829 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:30:52.040863 2064308 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	I0916 10:30:52.062309 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
	I0916 10:30:52.062358 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
	I0916 10:30:52.141020 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrole.yaml
	I0916 10:30:52.141055 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrole.yaml --> /etc/kubernetes/addons/ig-clusterrole.yaml (1485 bytes)
	I0916 10:30:52.150339 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:30:52.156143 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-svc.yaml
	I0916 10:30:52.156182 2064308 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
	I0916 10:30:52.267727 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
	I0916 10:30:52.267754 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
	I0916 10:30:52.278358 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
	I0916 10:30:52.278410 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
	I0916 10:30:52.295260 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:30:52.315396 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:30:52.396988 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrolebinding.yaml
	I0916 10:30:52.397029 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrolebinding.yaml --> /etc/kubernetes/addons/ig-clusterrolebinding.yaml (274 bytes)
	I0916 10:30:52.414014 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:30:52.414040 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
	I0916 10:30:52.514419 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
	I0916 10:30:52.514447 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
	I0916 10:30:52.534199 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
	I0916 10:30:52.534239 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
	I0916 10:30:52.671597 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (1.290515457s)
	I0916 10:30:52.715081 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-crd.yaml
	I0916 10:30:52.715111 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-crd.yaml --> /etc/kubernetes/addons/ig-crd.yaml (5216 bytes)
	I0916 10:30:52.719191 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:30:52.832284 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
	I0916 10:30:52.832313 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
	I0916 10:30:52.924488 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
	I0916 10:30:52.924521 2064308 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
	I0916 10:30:53.168769 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:30:53.168802 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-daemonset.yaml (7735 bytes)
	I0916 10:30:53.177620 2064308 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (1.796357981s)
	I0916 10:30:53.177657 2064308 start.go:971] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
	I0916 10:30:53.177732 2064308 ssh_runner.go:235] Completed: sudo systemctl start kubelet: (1.79643144s)
	I0916 10:30:53.179507 2064308 node_ready.go:35] waiting up to 6m0s for node "addons-451841" to be "Ready" ...
	I0916 10:30:53.184404 2064308 node_ready.go:49] node "addons-451841" has status "Ready":"True"
	I0916 10:30:53.184443 2064308 node_ready.go:38] duration metric: took 4.710029ms for node "addons-451841" to be "Ready" ...
	I0916 10:30:53.184458 2064308 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:30:53.197525 2064308 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace to be "Ready" ...
	I0916 10:30:53.269899 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
	I0916 10:30:53.269941 2064308 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
	I0916 10:30:53.282557 2064308 addons.go:431] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:30:53.282590 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
	I0916 10:30:53.466493 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
	I0916 10:30:53.466519 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
	I0916 10:30:53.471643 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:30:53.602578 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:30:53.683185 2064308 kapi.go:214] "coredns" deployment in "kube-system" namespace and "addons-451841" context rescaled to 1 replicas
	I0916 10:30:53.701295 2064308 pod_ready.go:98] error getting pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bvmss" not found
	I0916 10:30:53.701323 2064308 pod_ready.go:82] duration metric: took 503.765362ms for pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace to be "Ready" ...
	E0916 10:30:53.701335 2064308 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bvmss" not found
	I0916 10:30:53.701342 2064308 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace to be "Ready" ...
	I0916 10:30:53.722187 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
	I0916 10:30:53.722214 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
	I0916 10:30:54.162813 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:30:54.162856 2064308 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
	I0916 10:30:54.507449 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:30:55.304651 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (3.899501244s)
	I0916 10:30:55.634996 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (3.939582165s)
	I0916 10:30:55.635110 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (4.098941534s)
	I0916 10:30:55.711983 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:30:57.666996 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
	I0916 10:30:57.667089 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:57.696419 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:57.712916 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:30:58.304674 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
	I0916 10:30:58.423037 2064308 addons.go:234] Setting addon gcp-auth=true in "addons-451841"
	I0916 10:30:58.423145 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:58.423647 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:58.453963 2064308 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
	I0916 10:30:58.454022 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:58.488418 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:59.724111 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:01.085964 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml: (9.369716418s)
	I0916 10:31:01.086088 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (9.252309206s)
	I0916 10:31:01.086143 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (9.191998071s)
	I0916 10:31:01.086179 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (8.93580956s)
	I0916 10:31:01.086966 2064308 addons.go:475] Verifying addon registry=true in "addons-451841"
	I0916 10:31:01.086280 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (8.790993869s)
	I0916 10:31:01.087161 2064308 addons.go:475] Verifying addon ingress=true in "addons-451841"
	I0916 10:31:01.086364 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (8.367136002s)
	I0916 10:31:01.086423 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml: (7.614752608s)
	I0916 10:31:01.086494 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (7.483872887s)
	I0916 10:31:01.086607 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (8.771185269s)
	I0916 10:31:01.087690 2064308 addons.go:475] Verifying addon metrics-server=true in "addons-451841"
	W0916 10:31:01.087784 2064308 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:31:01.087807 2064308 retry.go:31] will retry after 241.995667ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:31:01.089709 2064308 out.go:177] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
	
		minikube -p addons-451841 service yakd-dashboard -n yakd-dashboard
	
	I0916 10:31:01.089714 2064308 out.go:177] * Verifying ingress addon...
	I0916 10:31:01.089782 2064308 out.go:177] * Verifying registry addon...
	I0916 10:31:01.092615 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
	I0916 10:31:01.093670 2064308 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
	I0916 10:31:01.146629 2064308 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
	I0916 10:31:01.146661 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:01.147988 2064308 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
	I0916 10:31:01.148013 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:01.330778 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:31:01.607432 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:01.608116 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:01.783267 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (7.275754054s)
	I0916 10:31:01.783417 2064308 addons.go:475] Verifying addon csi-hostpath-driver=true in "addons-451841"
	I0916 10:31:01.783367 2064308 ssh_runner.go:235] Completed: cat /var/lib/minikube/google_application_credentials.json: (3.329378043s)
	I0916 10:31:01.785766 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:31:01.785796 2064308 out.go:177] * Verifying csi-hostpath-driver addon...
	I0916 10:31:01.788664 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.2
	I0916 10:31:01.789894 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
	I0916 10:31:01.794958 2064308 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0916 10:31:01.795006 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:01.797295 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
	I0916 10:31:01.797332 2064308 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
	I0916 10:31:01.893997 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-service.yaml
	I0916 10:31:01.894071 2064308 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
	I0916 10:31:01.937742 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:31:01.937810 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
	I0916 10:31:01.987240 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:31:02.097286 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:02.100875 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:02.209340 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:02.305635 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:02.597370 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:02.599723 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:02.795942 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:03.100397 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:03.103196 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:03.312002 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:03.374850 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml: (1.387566006s)
	I0916 10:31:03.375988 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (2.045090366s)
	I0916 10:31:03.378127 2064308 addons.go:475] Verifying addon gcp-auth=true in "addons-451841"
	I0916 10:31:03.381777 2064308 out.go:177] * Verifying gcp-auth addon...
	I0916 10:31:03.384298 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
	I0916 10:31:03.406084 2064308 kapi.go:86] Found 0 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:31:03.599867 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:03.600481 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:03.720241 2064308 pod_ready.go:93] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.720276 2064308 pod_ready.go:82] duration metric: took 10.018926311s for pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.720289 2064308 pod_ready.go:79] waiting up to 6m0s for pod "etcd-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.746042 2064308 pod_ready.go:93] pod "etcd-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.746067 2064308 pod_ready.go:82] duration metric: took 25.771231ms for pod "etcd-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.746081 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.752533 2064308 pod_ready.go:93] pod "kube-apiserver-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.752559 2064308 pod_ready.go:82] duration metric: took 6.470582ms for pod "kube-apiserver-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.752571 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.758462 2064308 pod_ready.go:93] pod "kube-controller-manager-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.758495 2064308 pod_ready.go:82] duration metric: took 5.916018ms for pod "kube-controller-manager-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.758507 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tltkn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.765336 2064308 pod_ready.go:93] pod "kube-proxy-tltkn" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.765369 2064308 pod_ready.go:82] duration metric: took 6.854119ms for pod "kube-proxy-tltkn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.765382 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.795811 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:04.099344 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:04.100673 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:04.104903 2064308 pod_ready.go:93] pod "kube-scheduler-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:04.104972 2064308 pod_ready.go:82] duration metric: took 339.581954ms for pod "kube-scheduler-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:04.104999 2064308 pod_ready.go:79] waiting up to 6m0s for pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:04.295860 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:04.598910 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:04.602815 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:04.795954 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:05.100224 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:05.101534 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:05.296166 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:05.599439 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:05.601426 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:05.795442 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:06.102393 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:06.103036 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:06.122130 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:06.299045 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:06.599932 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:06.601206 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:06.814263 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:07.096848 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:07.101223 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:07.295217 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:07.599444 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:07.600431 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:07.795082 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:08.101892 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:08.102976 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:08.296014 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:08.598395 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:08.598643 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:08.613020 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:08.795739 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:09.103941 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:09.104967 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:09.295694 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:09.599659 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:09.601180 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:09.796354 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:10.098446 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:10.099577 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:10.295198 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:10.597281 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:10.599286 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:10.616287 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:10.795720 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:11.097801 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:11.099342 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:11.297048 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:11.599247 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:11.599974 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:11.794513 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:12.097432 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:12.099058 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:12.295097 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:12.598578 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:12.599897 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:12.796822 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:13.096898 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:13.098940 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:13.112547 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:13.295802 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:13.599642 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:13.600761 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:13.794583 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:14.096452 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:14.098429 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:14.297517 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:14.598010 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:14.599983 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:14.795140 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:15.104125 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:15.104975 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:15.113778 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:15.295679 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:15.598018 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:15.598555 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:15.795791 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:16.096811 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:16.099236 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:16.296057 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:16.597945 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:16.599646 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:16.797262 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:17.098985 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:17.099689 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:17.295469 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:17.599269 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:17.600683 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:17.611951 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:17.794427 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:18.099862 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:18.101710 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:18.296191 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:18.596772 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:18.600049 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:18.811403 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:19.098130 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:19.099143 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:19.111509 2064308 pod_ready.go:93] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:19.111570 2064308 pod_ready.go:82] duration metric: took 15.006549742s for pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:19.111587 2064308 pod_ready.go:39] duration metric: took 25.927112572s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:31:19.111604 2064308 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:31:19.111670 2064308 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:31:19.162518 2064308 api_server.go:72] duration metric: took 28.954985289s to wait for apiserver process to appear ...
	I0916 10:31:19.162546 2064308 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:31:19.162572 2064308 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:31:19.179642 2064308 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:31:19.180628 2064308 api_server.go:141] control plane version: v1.31.1
	I0916 10:31:19.180658 2064308 api_server.go:131] duration metric: took 18.103285ms to wait for apiserver health ...
	I0916 10:31:19.180668 2064308 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:31:19.201200 2064308 system_pods.go:59] 18 kube-system pods found
	I0916 10:31:19.201280 2064308 system_pods.go:61] "coredns-7c65d6cfc9-jqthn" [bc44b698-a863-4ab8-85d8-5bd54d141bd3] Running
	I0916 10:31:19.201299 2064308 system_pods.go:61] "csi-hostpath-attacher-0" [b2e317f7-5d09-4cd6-823d-cc849d3bf9e2] Running
	I0916 10:31:19.201316 2064308 system_pods.go:61] "csi-hostpath-resizer-0" [1d31c631-4247-4ffe-8a87-9b11803bc389] Running
	I0916 10:31:19.201350 2064308 system_pods.go:61] "csi-hostpathplugin-r28vj" [ad580f65-4a4a-445c-bec7-b518245397ea] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0916 10:31:19.201372 2064308 system_pods.go:61] "etcd-addons-451841" [1a9d168e-83c3-4fc0-b188-1da688352b51] Running
	I0916 10:31:19.201392 2064308 system_pods.go:61] "kindnet-zckxr" [b831bf61-6a35-4681-afe4-f5a0f875a7b5] Running
	I0916 10:31:19.201409 2064308 system_pods.go:61] "kube-apiserver-addons-451841" [db38aff8-23ff-466f-a1b0-ab5f2041183c] Running
	I0916 10:31:19.201425 2064308 system_pods.go:61] "kube-controller-manager-addons-451841" [53d22d78-137d-4306-b8df-d9a55061c9df] Running
	I0916 10:31:19.201458 2064308 system_pods.go:61] "kube-ingress-dns-minikube" [c77fac29-39b0-4e39-94f6-a72ac395b130] Running
	I0916 10:31:19.201483 2064308 system_pods.go:61] "kube-proxy-tltkn" [073e7c9c-1ec1-45db-9603-68862d546266] Running
	I0916 10:31:19.201501 2064308 system_pods.go:61] "kube-scheduler-addons-451841" [5b953c55-7b45-4221-89bf-177d1a4efd05] Running
	I0916 10:31:19.201520 2064308 system_pods.go:61] "metrics-server-84c5f94fbc-q47pm" [2252baaa-acbc-43dd-b38e-e8cd59ac7825] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0916 10:31:19.201537 2064308 system_pods.go:61] "nvidia-device-plugin-daemonset-l6r5c" [9645617a-ab29-4c4e-a4ec-4ea217ffb5fb] Running
	I0916 10:31:19.201566 2064308 system_pods.go:61] "registry-66c9cd494c-l957b" [17af08bf-9965-4bec-8d1b-0c4c37167ac1] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
	I0916 10:31:19.201591 2064308 system_pods.go:61] "registry-proxy-9cpxl" [10361d7a-9abb-41e6-88eb-2194d01e1301] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0916 10:31:19.201613 2064308 system_pods.go:61] "snapshot-controller-56fcc65765-6llf9" [7febb7b8-15ce-499d-83f7-b82e03e35902] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.201634 2064308 system_pods.go:61] "snapshot-controller-56fcc65765-qxvll" [0a01a9d7-31e0-4965-baba-078f44b17fac] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.201663 2064308 system_pods.go:61] "storage-provisioner" [6a86a07f-e25d-4ac9-9c07-dc9ae0048083] Running
	I0916 10:31:19.201686 2064308 system_pods.go:74] duration metric: took 21.010389ms to wait for pod list to return data ...
	I0916 10:31:19.201707 2064308 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:31:19.204845 2064308 default_sa.go:45] found service account: "default"
	I0916 10:31:19.204868 2064308 default_sa.go:55] duration metric: took 3.144001ms for default service account to be created ...
	I0916 10:31:19.204877 2064308 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:31:19.219489 2064308 system_pods.go:86] 18 kube-system pods found
	I0916 10:31:19.219563 2064308 system_pods.go:89] "coredns-7c65d6cfc9-jqthn" [bc44b698-a863-4ab8-85d8-5bd54d141bd3] Running
	I0916 10:31:19.219586 2064308 system_pods.go:89] "csi-hostpath-attacher-0" [b2e317f7-5d09-4cd6-823d-cc849d3bf9e2] Running
	I0916 10:31:19.219605 2064308 system_pods.go:89] "csi-hostpath-resizer-0" [1d31c631-4247-4ffe-8a87-9b11803bc389] Running
	I0916 10:31:19.219640 2064308 system_pods.go:89] "csi-hostpathplugin-r28vj" [ad580f65-4a4a-445c-bec7-b518245397ea] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0916 10:31:19.219669 2064308 system_pods.go:89] "etcd-addons-451841" [1a9d168e-83c3-4fc0-b188-1da688352b51] Running
	I0916 10:31:19.219690 2064308 system_pods.go:89] "kindnet-zckxr" [b831bf61-6a35-4681-afe4-f5a0f875a7b5] Running
	I0916 10:31:19.219710 2064308 system_pods.go:89] "kube-apiserver-addons-451841" [db38aff8-23ff-466f-a1b0-ab5f2041183c] Running
	I0916 10:31:19.219728 2064308 system_pods.go:89] "kube-controller-manager-addons-451841" [53d22d78-137d-4306-b8df-d9a55061c9df] Running
	I0916 10:31:19.219766 2064308 system_pods.go:89] "kube-ingress-dns-minikube" [c77fac29-39b0-4e39-94f6-a72ac395b130] Running
	I0916 10:31:19.219784 2064308 system_pods.go:89] "kube-proxy-tltkn" [073e7c9c-1ec1-45db-9603-68862d546266] Running
	I0916 10:31:19.219799 2064308 system_pods.go:89] "kube-scheduler-addons-451841" [5b953c55-7b45-4221-89bf-177d1a4efd05] Running
	I0916 10:31:19.219819 2064308 system_pods.go:89] "metrics-server-84c5f94fbc-q47pm" [2252baaa-acbc-43dd-b38e-e8cd59ac7825] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0916 10:31:19.219847 2064308 system_pods.go:89] "nvidia-device-plugin-daemonset-l6r5c" [9645617a-ab29-4c4e-a4ec-4ea217ffb5fb] Running
	I0916 10:31:19.219875 2064308 system_pods.go:89] "registry-66c9cd494c-l957b" [17af08bf-9965-4bec-8d1b-0c4c37167ac1] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
	I0916 10:31:19.219896 2064308 system_pods.go:89] "registry-proxy-9cpxl" [10361d7a-9abb-41e6-88eb-2194d01e1301] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0916 10:31:19.219915 2064308 system_pods.go:89] "snapshot-controller-56fcc65765-6llf9" [7febb7b8-15ce-499d-83f7-b82e03e35902] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.219935 2064308 system_pods.go:89] "snapshot-controller-56fcc65765-qxvll" [0a01a9d7-31e0-4965-baba-078f44b17fac] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.219968 2064308 system_pods.go:89] "storage-provisioner" [6a86a07f-e25d-4ac9-9c07-dc9ae0048083] Running
	I0916 10:31:19.219989 2064308 system_pods.go:126] duration metric: took 15.104177ms to wait for k8s-apps to be running ...
	I0916 10:31:19.220008 2064308 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:31:19.220090 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:31:19.263162 2064308 system_svc.go:56] duration metric: took 43.144676ms WaitForService to wait for kubelet
	I0916 10:31:19.263243 2064308 kubeadm.go:582] duration metric: took 29.055714708s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:31:19.263279 2064308 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:31:19.272478 2064308 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:31:19.272561 2064308 node_conditions.go:123] node cpu capacity is 2
	I0916 10:31:19.272591 2064308 node_conditions.go:105] duration metric: took 9.29091ms to run NodePressure ...
	I0916 10:31:19.272616 2064308 start.go:241] waiting for startup goroutines ...
	I0916 10:31:19.305039 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:19.605207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:19.605801 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:19.797193 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:20.099691 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:20.101048 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:20.295291 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:20.597682 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:20.598569 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:20.797887 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:21.096766 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:21.099258 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:21.294755 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:21.597973 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:21.600238 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:21.803444 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:22.097870 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:22.100851 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:22.295006 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:22.597700 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:22.598742 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:22.795839 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:23.096175 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:23.098155 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:23.294814 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:23.596166 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:23.598634 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:23.795172 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:24.096643 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:24.099715 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:24.297255 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:24.598721 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:24.599933 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:24.795260 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:25.098369 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:25.101032 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:25.295093 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:25.597734 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:25.597966 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:25.795323 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:26.096041 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:26.099677 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:26.295063 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:26.597593 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:26.599159 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:26.795825 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:27.098811 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:27.099453 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:27.295012 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:27.597182 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:27.601645 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:27.795056 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:28.128064 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:28.129640 2064308 kapi.go:107] duration metric: took 27.037023988s to wait for kubernetes.io/minikube-addons=registry ...
	I0916 10:31:28.325425 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:28.598623 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:28.795615 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:29.104511 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:29.295646 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:29.598962 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:29.795067 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:30.099851 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:30.296647 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:30.598332 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:30.796058 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:31.099992 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:31.294874 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:31.598117 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:31.796531 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:32.098393 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:32.295287 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:32.598055 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:32.795217 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:33.099311 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:33.295339 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:33.598188 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:33.795029 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:34.098345 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:34.295712 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:34.598442 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:34.795386 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:35.098874 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:35.295415 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:35.598136 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:35.795586 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:36.098658 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:36.294379 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:36.598764 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:36.795529 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:37.098523 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:37.296711 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:37.601252 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:37.799472 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:38.100971 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:38.298686 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:38.599535 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:38.795481 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:39.098734 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:39.296827 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:39.611876 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:39.851830 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:40.108718 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:40.295843 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:40.599050 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:40.795575 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:41.098568 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:41.295957 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:41.598039 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:41.796038 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:42.099484 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:42.295707 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:42.598887 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:42.795416 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:43.099107 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:43.295766 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:43.599999 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:43.795242 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:44.098395 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:44.295957 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:44.600054 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:44.794470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:45.100863 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:45.295685 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:45.599065 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:45.798514 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:46.099116 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:46.296057 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:46.599389 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:46.796585 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:47.099083 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:47.296145 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:47.598490 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:47.797079 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:48.100448 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:48.295294 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:48.598227 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:48.794662 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:49.119185 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:49.295351 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:49.598797 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:49.794962 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:50.098374 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:50.295501 2064308 kapi.go:107] duration metric: took 48.505612662s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
	I0916 10:31:50.598550 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:51.098277 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:51.598976 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:52.098206 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:52.597960 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:53.098585 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:53.598884 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:54.098582 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:54.598852 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:55.098478 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:55.598212 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:56.098517 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:56.598412 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:57.098499 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:57.598710 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:58.097637 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:58.599134 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:59.098778 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:59.598318 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:00.130067 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:00.598955 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:01.098901 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:01.598465 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:02.098925 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:02.598148 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:03.102570 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:03.598295 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:04.099028 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:04.598994 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:05.100186 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:05.598454 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:06.098931 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:06.598336 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:07.098800 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:07.599302 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:08.098401 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:08.599190 2064308 kapi.go:107] duration metric: took 1m7.505513413s to wait for app.kubernetes.io/name=ingress-nginx ...
	I0916 10:32:25.388811 2064308 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:32:25.388836 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:25.888825 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:26.388022 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:26.887847 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:27.387834 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:27.888795 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:28.387767 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:28.887542 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:29.388486 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:29.888784 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:30.387676 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:30.888490 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:31.388236 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:31.888242 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:32.387732 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:32.888206 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:33.387868 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:33.887962 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:34.387683 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:34.889279 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:35.388145 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:35.887555 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:36.389045 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:36.887848 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:37.388742 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:37.888016 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:38.388211 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:38.887716 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:39.388708 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:39.888575 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:40.388841 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:40.888385 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:41.388668 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:41.887792 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:42.388021 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:42.888125 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:43.388320 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:43.887796 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:44.388101 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:44.888791 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:45.391207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:45.888190 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:46.387869 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:46.887554 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:47.388470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:47.888177 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:48.387903 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:48.888232 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:49.388449 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:49.888527 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:50.388650 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:50.888495 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:51.388590 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:51.888197 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:52.387563 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:52.888238 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:53.388322 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:53.887557 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:54.388664 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:54.888836 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:55.388171 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:55.888180 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:56.388322 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:56.888567 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:57.388117 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:57.887422 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:58.388230 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:58.887872 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:59.396878 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:59.888550 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:00.394252 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:00.887612 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:01.392523 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:01.888091 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:02.393207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:02.887610 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:03.388745 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:03.888344 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:04.388999 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:04.889012 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:05.390448 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:05.888198 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:06.395413 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:06.889275 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:07.387879 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:07.888183 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:08.388311 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:08.888612 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:09.388334 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:09.887931 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:10.387765 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:10.888317 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:11.388557 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:11.887439 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:12.388213 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:12.887810 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:13.388135 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:13.888239 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:14.388445 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:14.889102 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:15.388533 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:15.887383 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:16.388426 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:16.888022 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:17.388399 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:17.887327 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:18.388016 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:18.887470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:19.388533 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:19.889124 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:20.387631 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:20.888484 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:21.388124 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:21.887946 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:22.388268 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:22.887332 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:23.388224 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:23.887844 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:24.387744 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:24.888405 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:25.388231 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:25.888672 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:26.388063 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:26.888126 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:27.387865 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:27.887552 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:28.387806 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:28.887772 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:29.388587 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:29.888551 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:30.387903 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:30.887507 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:31.388609 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:31.888449 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:32.388259 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:32.887834 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:33.388141 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:33.888934 2064308 kapi.go:107] duration metric: took 2m30.504634261s to wait for kubernetes.io/minikube-addons=gcp-auth ...
	I0916 10:33:33.890859 2064308 out.go:177] * Your GCP credentials will now be mounted into every pod created in the addons-451841 cluster.
	I0916 10:33:33.892432 2064308 out.go:177] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
	I0916 10:33:33.893920 2064308 out.go:177] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
	I0916 10:33:33.895584 2064308 out.go:177] * Enabled addons: nvidia-device-plugin, storage-provisioner, ingress-dns, storage-provisioner-rancher, volcano, cloud-spanner, metrics-server, inspektor-gadget, yakd, default-storageclass, volumesnapshots, registry, csi-hostpath-driver, ingress, gcp-auth
	I0916 10:33:33.897279 2064308 addons.go:510] duration metric: took 2m43.689318504s for enable addons: enabled=[nvidia-device-plugin storage-provisioner ingress-dns storage-provisioner-rancher volcano cloud-spanner metrics-server inspektor-gadget yakd default-storageclass volumesnapshots registry csi-hostpath-driver ingress gcp-auth]
	I0916 10:33:33.897342 2064308 start.go:246] waiting for cluster config update ...
	I0916 10:33:33.897367 2064308 start.go:255] writing updated cluster config ...
	I0916 10:33:33.898186 2064308 ssh_runner.go:195] Run: rm -f paused
	I0916 10:33:33.906793 2064308 out.go:177] * Done! kubectl is now configured to use "addons-451841" cluster and "default" namespace by default
	E0916 10:33:33.908425 2064308 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                                     ATTEMPT             POD ID              POD
	f90c8869604c5       6ef582f3ec844       11 minutes ago      Running             gcp-auth                                 0                   9038b6b53facd       gcp-auth-89d5ffd79-pw58v
	6f68aecec6aa2       8b46b1cd48760       12 minutes ago      Running             admission                                0                   e3af2951f3794       volcano-admission-77d7d48b68-sjxcs
	dd63136d8d6ac       289a818c8d9c5       12 minutes ago      Running             controller                               0                   b4699f942aa64       ingress-nginx-controller-bc57996ff-rqhcp
	a490639f0e8aa       ee6d597e62dc8       13 minutes ago      Running             csi-snapshotter                          0                   8260f57befdda       csi-hostpathplugin-r28vj
	317ce7462f733       642ded511e141       13 minutes ago      Running             csi-provisioner                          0                   8260f57befdda       csi-hostpathplugin-r28vj
	324d1501e94dc       420193b27261a       13 minutes ago      Exited              patch                                    2                   13d0568de4b9d       ingress-nginx-admission-patch-z2qc7
	7b9cc7b5195ab       922312104da8a       13 minutes ago      Running             liveness-probe                           0                   8260f57befdda       csi-hostpathplugin-r28vj
	bee97d004dc4d       08f6b2990811a       13 minutes ago      Running             hostpath                                 0                   8260f57befdda       csi-hostpathplugin-r28vj
	19ec6d5fbc0fd       8b46b1cd48760       13 minutes ago      Exited              main                                     0                   009b63594d8b2       volcano-admission-init-bz266
	000463bf50714       d9c7ad4c226bf       13 minutes ago      Running             volcano-scheduler                        0                   d9214c0e709d4       volcano-scheduler-576bc46687-xwjbn
	4dc42ec686d73       1505f556b3a7b       13 minutes ago      Running             volcano-controllers                      0                   343744c6dcf07       volcano-controllers-56675bb4d5-2ltwp
	30f0f6b13e6d5       420193b27261a       13 minutes ago      Exited              create                                   0                   993d0544f3868       ingress-nginx-admission-create-4vr4g
	32df8554c702e       4d1e5c3e97420       13 minutes ago      Running             volume-snapshot-controller               0                   864cff1eb40c5       snapshot-controller-56fcc65765-6llf9
	f28ea158892d3       5548a49bb60ba       13 minutes ago      Running             metrics-server                           0                   f49e908ac9969       metrics-server-84c5f94fbc-q47pm
	7558a63005c7b       0107d56dbc0be       13 minutes ago      Running             node-driver-registrar                    0                   8260f57befdda       csi-hostpathplugin-r28vj
	d181a00ffae8d       4d1e5c3e97420       13 minutes ago      Running             volume-snapshot-controller               0                   5f5c44341cf11       snapshot-controller-56fcc65765-qxvll
	6b271689ecd4e       7ce2150c8929b       13 minutes ago      Running             local-path-provisioner                   0                   de8add92893e8       local-path-provisioner-86d989889c-qkpm6
	98b48c685a09e       487fa743e1e22       13 minutes ago      Running             csi-resizer                              0                   89cb8ade3231b       csi-hostpath-resizer-0
	2472144c5bc6d       1461903ec4fe9       13 minutes ago      Running             csi-external-health-monitor-controller   0                   8260f57befdda       csi-hostpathplugin-r28vj
	0af6491cd95ee       9a80d518f102c       13 minutes ago      Running             csi-attacher                             0                   0d39436266817       csi-hostpath-attacher-0
	9b811a5c5e80c       35508c2f890c4       13 minutes ago      Running             minikube-ingress-dns                     0                   e1ed027bac8d8       kube-ingress-dns-minikube
	5232ad6b096cb       2f6c962e7b831       13 minutes ago      Running             coredns                                  0                   3ad39eb105298       coredns-7c65d6cfc9-jqthn
	4ddb5fa614111       ba04bb24b9575       14 minutes ago      Running             storage-provisioner                      0                   d0cffc65c18c1       storage-provisioner
	64b671b165f6f       6a23fa8fd2b78       14 minutes ago      Running             kindnet-cni                              0                   bd9ef3e1818e4       kindnet-zckxr
	35987f39fe9ef       24a140c548c07       14 minutes ago      Running             kube-proxy                               0                   6a8ebbdde94be       kube-proxy-tltkn
	8769c148a0bb3       27e3830e14027       14 minutes ago      Running             etcd                                     0                   290d52892953c       etcd-addons-451841
	31da3c8e5867c       279f381cb3736       14 minutes ago      Running             kube-controller-manager                  0                   349d5195292e8       kube-controller-manager-addons-451841
	808425f96a229       7f8aa378bb47d       14 minutes ago      Running             kube-scheduler                           0                   50415da17c7f0       kube-scheduler-addons-451841
	2870b9699fd97       d3f53a98c0a9d       14 minutes ago      Running             kube-apiserver                           0                   1d8868dd2cf0d       kube-apiserver-addons-451841
	
	
	==> containerd <==
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.648373925Z" level=info msg="ImageUpdate event name:\"ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.650196895Z" level=info msg="stop pulling image ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec: active requests=0, bytes read=89"
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.653941954Z" level=info msg="Pulled image \"ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\" with image id \"sha256:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd\", repo tag \"\", repo digest \"ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\", size \"72524105\" in 129.568062ms"
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.653989757Z" level=info msg="PullImage \"ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\" returns image reference \"sha256:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd\""
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.656693206Z" level=info msg="CreateContainer within sandbox \"11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257\" for container &ContainerMetadata{Name:gadget,Attempt:7,}"
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.677098875Z" level=info msg="CreateContainer within sandbox \"11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257\" for &ContainerMetadata{Name:gadget,Attempt:7,} returns container id \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\""
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.677938254Z" level=info msg="StartContainer for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\""
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.730010072Z" level=info msg="StartContainer for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\" returns successfully"
	Sep 16 10:42:30 addons-451841 containerd[816]: time="2024-09-16T10:42:30.138310098Z" level=error msg="ExecSync for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\" failed" error="failed to exec in container: failed to start exec \"18945d8e8329249f45b429c8d2c629c681f79f2690cb164b744d16d6edaa41f1\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Sep 16 10:42:30 addons-451841 containerd[816]: time="2024-09-16T10:42:30.156036547Z" level=error msg="ExecSync for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\" failed" error="failed to exec in container: failed to start exec \"24fda1abd5567e07b5878128ccdadf5a45b72b364dd529766ac153034519db72\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Sep 16 10:42:30 addons-451841 containerd[816]: time="2024-09-16T10:42:30.170223237Z" level=error msg="ExecSync for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\" failed" error="failed to exec in container: failed to start exec \"b427372ae9babdb028a4500d5cfa0aa496cec7f5f42145c1a1f6f062b2dc9c0c\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Sep 16 10:42:30 addons-451841 containerd[816]: time="2024-09-16T10:42:30.324237655Z" level=info msg="shim disconnected" id=89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df namespace=k8s.io
	Sep 16 10:42:30 addons-451841 containerd[816]: time="2024-09-16T10:42:30.324320912Z" level=warning msg="cleaning up after shim disconnected" id=89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df namespace=k8s.io
	Sep 16 10:42:30 addons-451841 containerd[816]: time="2024-09-16T10:42:30.324332883Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:42:31 addons-451841 containerd[816]: time="2024-09-16T10:42:31.028529096Z" level=info msg="RemoveContainer for \"4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c\""
	Sep 16 10:42:31 addons-451841 containerd[816]: time="2024-09-16T10:42:31.035424189Z" level=info msg="RemoveContainer for \"4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c\" returns successfully"
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.895364751Z" level=info msg="StopPodSandbox for \"11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257\""
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.895453776Z" level=info msg="Container to stop \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\" must be in running or unknown state, current state \"CONTAINER_EXITED\""
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.959130956Z" level=info msg="shim disconnected" id=11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257 namespace=k8s.io
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.959197991Z" level=warning msg="cleaning up after shim disconnected" id=11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257 namespace=k8s.io
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.959209019Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.979468540Z" level=info msg="TearDown network for sandbox \"11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257\" successfully"
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.979721749Z" level=info msg="StopPodSandbox for \"11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257\" returns successfully"
	Sep 16 10:44:52 addons-451841 containerd[816]: time="2024-09-16T10:44:52.421531479Z" level=info msg="RemoveContainer for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\""
	Sep 16 10:44:52 addons-451841 containerd[816]: time="2024-09-16T10:44:52.429315487Z" level=info msg="RemoveContainer for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\" returns successfully"
	
	
	==> coredns [5232ad6b096cb39cf18a9c11e936d3dae11b081bd6666741f3c42e78161ed09f] <==
	[INFO] 10.244.0.9:45725 - 37874 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000066576s
	[INFO] 10.244.0.9:59523 - 16440 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002202223s
	[INFO] 10.244.0.9:59523 - 22330 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002459412s
	[INFO] 10.244.0.9:50469 - 36811 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000147232s
	[INFO] 10.244.0.9:50469 - 6599 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000154395s
	[INFO] 10.244.0.9:54670 - 20364 "AAAA IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000113739s
	[INFO] 10.244.0.9:54670 - 51376 "A IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000392596s
	[INFO] 10.244.0.9:37135 - 16205 "A IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000064189s
	[INFO] 10.244.0.9:37135 - 64832 "AAAA IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.00005714s
	[INFO] 10.244.0.9:54223 - 7962 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000052168s
	[INFO] 10.244.0.9:54223 - 14360 "AAAA IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000048632s
	[INFO] 10.244.0.9:33840 - 38805 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001404552s
	[INFO] 10.244.0.9:33840 - 4752 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.00164702s
	[INFO] 10.244.0.9:45027 - 58736 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000075766s
	[INFO] 10.244.0.9:45027 - 39026 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000213093s
	[INFO] 10.244.0.24:51483 - 10090 "AAAA IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.000202231s
	[INFO] 10.244.0.24:42195 - 64926 "A IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.000154649s
	[INFO] 10.244.0.24:32892 - 59527 "AAAA IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000163215s
	[INFO] 10.244.0.24:47611 - 11902 "A IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000096418s
	[INFO] 10.244.0.24:59950 - 37722 "AAAA IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.00008466s
	[INFO] 10.244.0.24:52002 - 29131 "A IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.000089403s
	[INFO] 10.244.0.24:38598 - 65011 "AAAA IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.002266241s
	[INFO] 10.244.0.24:60458 - 11928 "A IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.001809759s
	[INFO] 10.244.0.24:43975 - 30277 "A IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 610 0.001862182s
	[INFO] 10.244.0.24:51154 - 58482 "AAAA IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 240 0.002138832s
	
	
	==> describe nodes <==
	Name:               addons-451841
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=addons-451841
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=addons-451841
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_30_46_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	                    topology.hostpath.csi/node=addons-451841
	Annotations:        csi.volume.kubernetes.io/nodeid: {"hostpath.csi.k8s.io":"addons-451841"}
	                    kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:30:42 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  addons-451841
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:44:52 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:44:22 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:44:22 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:44:22 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:44:22 +0000   Mon, 16 Sep 2024 10:30:42 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    addons-451841
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 003b87cb77e5465aa882d8df5f5cd5ab
	  System UUID:                21a29522-aef6-4d70-a29b-0ea27731fdbe
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (21 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  gcp-auth                    gcp-auth-89d5ffd79-pw58v                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         12m
	  ingress-nginx               ingress-nginx-controller-bc57996ff-rqhcp    100m (5%)     0 (0%)      90Mi (1%)        0 (0%)         14m
	  kube-system                 coredns-7c65d6cfc9-jqthn                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     14m
	  kube-system                 csi-hostpath-attacher-0                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  kube-system                 csi-hostpath-resizer-0                      0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  kube-system                 csi-hostpathplugin-r28vj                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  kube-system                 etcd-addons-451841                          100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         14m
	  kube-system                 kindnet-zckxr                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      14m
	  kube-system                 kube-apiserver-addons-451841                250m (12%)    0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 kube-controller-manager-addons-451841       200m (10%)    0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 kube-ingress-dns-minikube                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 kube-proxy-tltkn                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 kube-scheduler-addons-451841                100m (5%)     0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 metrics-server-84c5f94fbc-q47pm             100m (5%)     0 (0%)      200Mi (2%)       0 (0%)         14m
	  kube-system                 snapshot-controller-56fcc65765-6llf9        0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 snapshot-controller-56fcc65765-qxvll        0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  local-path-storage          local-path-provisioner-86d989889c-qkpm6     0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  volcano-system              volcano-admission-77d7d48b68-sjxcs          0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  volcano-system              volcano-controllers-56675bb4d5-2ltwp        0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  volcano-system              volcano-scheduler-576bc46687-xwjbn          0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                1050m (52%)  100m (5%)
	  memory             510Mi (6%)   220Mi (2%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-1Gi      0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	  hugepages-32Mi     0 (0%)       0 (0%)
	  hugepages-64Ki     0 (0%)       0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   Starting                 14m                kube-proxy       
	  Normal   NodeAllocatableEnforced  14m                kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 14m                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  14m (x8 over 14m)  kubelet          Node addons-451841 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    14m (x7 over 14m)  kubelet          Node addons-451841 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     14m (x7 over 14m)  kubelet          Node addons-451841 status is now: NodeHasSufficientPID
	  Normal   Starting                 14m                kubelet          Starting kubelet.
	  Normal   Starting                 14m                kubelet          Starting kubelet.
	  Warning  CgroupV1                 14m                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  14m                kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  14m                kubelet          Node addons-451841 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    14m                kubelet          Node addons-451841 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     14m                kubelet          Node addons-451841 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           14m                node-controller  Node addons-451841 event: Registered Node addons-451841 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [8769c148a0bb341cc1dcca117d41b6be795d52ed6e49348d14da26aac1d42f01] <==
	{"level":"info","ts":"2024-09-16T10:30:38.620740Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:30:38.620758Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:30:39.574728Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.574952Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.575045Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.575117Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575193Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575240Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575326Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.581756Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:addons-451841 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:30:39.582006Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:30:39.582134Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:30:39.582418Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:30:39.582525Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:30:39.582434Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.583536Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:30:39.584617Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:30:39.585041Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.590779Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.590980Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.591021Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:30:39.592081Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:40:40.037545Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":1731}
	{"level":"info","ts":"2024-09-16T10:40:40.103709Z","caller":"mvcc/kvstore_compaction.go:69","msg":"finished scheduled compaction","compact-revision":1731,"took":"65.214596ms","hash":4239490720,"current-db-size-bytes":9076736,"current-db-size":"9.1 MB","current-db-size-in-use-bytes":5177344,"current-db-size-in-use":"5.2 MB"}
	{"level":"info","ts":"2024-09-16T10:40:40.103850Z","caller":"mvcc/hash.go:137","msg":"storing new hash","hash":4239490720,"revision":1731,"compact-revision":-1}
	
	
	==> gcp-auth [f90c8869604c54edfd93d5ef8e6467ed81e6a63fbedf9c5712f155d5d85f40b8] <==
	2024/09/16 10:33:32 GCP Auth Webhook started!
	2024/09/16 10:38:59 Ready to marshal response ...
	2024/09/16 10:38:59 Ready to write response ...
	2024/09/16 10:38:59 Ready to marshal response ...
	2024/09/16 10:38:59 Ready to write response ...
	2024/09/16 10:38:59 Ready to marshal response ...
	2024/09/16 10:38:59 Ready to write response ...
	
	
	==> kernel <==
	 10:44:58 up 1 day, 14:27,  0 users,  load average: 0.47, 0.42, 1.22
	Linux addons-451841 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [64b671b165f6f7bb28b281ddd3fe708221407f35f09389c964253f52887fd626] <==
	I0916 10:42:51.721412       1 main.go:299] handling current node
	I0916 10:43:01.721636       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:01.721670       1 main.go:299] handling current node
	I0916 10:43:11.730806       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:11.730843       1 main.go:299] handling current node
	I0916 10:43:21.724456       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:21.724489       1 main.go:299] handling current node
	I0916 10:43:31.724543       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:31.724584       1 main.go:299] handling current node
	I0916 10:43:41.727823       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:41.727859       1 main.go:299] handling current node
	I0916 10:43:51.720900       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:51.720937       1 main.go:299] handling current node
	I0916 10:44:01.728967       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:01.729002       1 main.go:299] handling current node
	I0916 10:44:11.726855       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:11.726894       1 main.go:299] handling current node
	I0916 10:44:21.723656       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:21.723690       1 main.go:299] handling current node
	I0916 10:44:31.726799       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:31.726835       1 main.go:299] handling current node
	I0916 10:44:41.721111       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:41.721149       1 main.go:299] handling current node
	I0916 10:44:51.721753       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:51.721790       1 main.go:299] handling current node
	
	
	==> kube-apiserver [2870b9699fd97d290c5750a6361bd1eb6ac986ce8fb7e3f9eb6474155c6b1fa8] <==
	W0916 10:32:04.547641       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:05.602664       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.287206       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:06.287255       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:32:06.288974       1 dispatcher.go:225] Failed calling webhook, failing closed mutatepod.volcano.sh: failed calling webhook "mutatepod.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/pods/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.354223       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:06.354265       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:32:06.355906       1 dispatcher.go:225] Failed calling webhook, failing closed mutatepod.volcano.sh: failed calling webhook "mutatepod.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/pods/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.623074       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:07.661520       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:08.760490       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:09.841622       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:10.917089       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:11.983956       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:13.046405       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:14.089526       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:25.289607       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:25.289652       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:33:06.298609       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:33:06.298665       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:33:06.363205       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:33:06.363253       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	I0916 10:38:59.795161       1 alloc.go:330] "allocated clusterIPs" service="headlamp/headlamp" clusterIPs={"IPv4":"10.111.56.3"}
	I0916 10:44:51.852485       1 handler.go:286] Adding GroupVersion gadget.kinvolk.io v1alpha1 to ResourceManager
	W0916 10:44:52.904959       1 cacher.go:171] Terminating all watchers from cacher traces.gadget.kinvolk.io
	
	
	==> kube-controller-manager [31da3c8e5867c3e2a6f4592fba3d201359a6c0c862a2620157496149c91a3b11] <==
	I0916 10:33:39.035647       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:33:39.082659       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:33:39.087612       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:33:49.934314       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-451841"
	I0916 10:38:37.530598       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="yakd-dashboard/yakd-dashboard-67d98fc6b" duration="21.727µs"
	I0916 10:38:43.718435       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/registry-66c9cd494c" duration="10.191µs"
	I0916 10:38:47.665807       1 namespace_controller.go:187] "Namespace has been deleted" logger="namespace-controller" namespace="yakd-dashboard"
	I0916 10:38:56.054829       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-451841"
	I0916 10:38:58.694110       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/cloud-spanner-emulator-769b77f747" duration="7.114µs"
	I0916 10:38:59.911454       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="86.11631ms"
	I0916 10:38:59.926177       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="14.113318ms"
	I0916 10:38:59.926285       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="67.922µs"
	I0916 10:38:59.926662       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="38.744µs"
	I0916 10:39:03.454274       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="54.203µs"
	I0916 10:39:03.484504       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="10.723572ms"
	I0916 10:39:03.484593       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="51.142µs"
	I0916 10:39:10.433645       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="6.515µs"
	I0916 10:39:16.650393       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-451841"
	I0916 10:39:20.588127       1 namespace_controller.go:187] "Namespace has been deleted" logger="namespace-controller" namespace="headlamp"
	I0916 10:44:22.473587       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-451841"
	E0916 10:44:52.906619       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError"
	W0916 10:44:54.075635       1 reflector.go:561] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0916 10:44:54.075684       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError"
	W0916 10:44:55.746627       1 reflector.go:561] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0916 10:44:55.746672       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError"
	
	
	==> kube-proxy [35987f39fe9efffcbcdfe8a1694d2541bd561939f35f2770e06a09f005dcf753] <==
	I0916 10:30:51.148935       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:30:51.266541       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:30:51.266602       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:30:51.307434       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:30:51.307506       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:30:51.310004       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:30:51.310401       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:30:51.310420       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:30:51.312344       1 config.go:199] "Starting service config controller"
	I0916 10:30:51.312371       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:30:51.312398       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:30:51.312403       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:30:51.315085       1 config.go:328] "Starting node config controller"
	I0916 10:30:51.315100       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:30:51.413119       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:30:51.413177       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:30:51.415238       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [808425f96a2291f8e0cf3dfea11339a46bc25f8b4e1f82c29efc8eee8e1d729a] <==
	W0916 10:30:43.815016       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:30:43.815095       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.815504       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.815602       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.815794       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 10:30:43.815882       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.816048       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:30:43.816126       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.816295       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.817022       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817307       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:30:43.817404       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817601       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.817688       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817801       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:30:43.818028       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817989       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:30:43.818395       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.818315       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 10:30:43.818847       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.818381       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 10:30:43.819065       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.819318       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:30:43.819478       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	I0916 10:30:44.999991       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 10:44:44 addons-451841 kubelet[1517]: I0916 10:44:44.522990    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:44:44 addons-451841 kubelet[1517]: E0916 10:44:44.525296    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.116904    1517 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"modules\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-modules\") pod \"6a752659-ec3c-4841-8e83-fd916caaebc2\" (UID: \"6a752659-ec3c-4841-8e83-fd916caaebc2\") "
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.116953    1517 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"debugfs\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-debugfs\") pod \"6a752659-ec3c-4841-8e83-fd916caaebc2\" (UID: \"6a752659-ec3c-4841-8e83-fd916caaebc2\") "
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.116971    1517 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-host\") pod \"6a752659-ec3c-4841-8e83-fd916caaebc2\" (UID: \"6a752659-ec3c-4841-8e83-fd916caaebc2\") "
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.116994    1517 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bpffs\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-bpffs\") pod \"6a752659-ec3c-4841-8e83-fd916caaebc2\" (UID: \"6a752659-ec3c-4841-8e83-fd916caaebc2\") "
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117027    1517 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4bbk\" (UniqueName: \"kubernetes.io/projected/6a752659-ec3c-4841-8e83-fd916caaebc2-kube-api-access-r4bbk\") pod \"6a752659-ec3c-4841-8e83-fd916caaebc2\" (UID: \"6a752659-ec3c-4841-8e83-fd916caaebc2\") "
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117046    1517 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cgroup\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-cgroup\") pod \"6a752659-ec3c-4841-8e83-fd916caaebc2\" (UID: \"6a752659-ec3c-4841-8e83-fd916caaebc2\") "
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117079    1517 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-run\") pod \"6a752659-ec3c-4841-8e83-fd916caaebc2\" (UID: \"6a752659-ec3c-4841-8e83-fd916caaebc2\") "
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117189    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-run" (OuterVolumeSpecName: "run") pod "6a752659-ec3c-4841-8e83-fd916caaebc2" (UID: "6a752659-ec3c-4841-8e83-fd916caaebc2"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117218    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-modules" (OuterVolumeSpecName: "modules") pod "6a752659-ec3c-4841-8e83-fd916caaebc2" (UID: "6a752659-ec3c-4841-8e83-fd916caaebc2"). InnerVolumeSpecName "modules". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117235    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-debugfs" (OuterVolumeSpecName: "debugfs") pod "6a752659-ec3c-4841-8e83-fd916caaebc2" (UID: "6a752659-ec3c-4841-8e83-fd916caaebc2"). InnerVolumeSpecName "debugfs". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117251    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-host" (OuterVolumeSpecName: "host") pod "6a752659-ec3c-4841-8e83-fd916caaebc2" (UID: "6a752659-ec3c-4841-8e83-fd916caaebc2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117267    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-bpffs" (OuterVolumeSpecName: "bpffs") pod "6a752659-ec3c-4841-8e83-fd916caaebc2" (UID: "6a752659-ec3c-4841-8e83-fd916caaebc2"). InnerVolumeSpecName "bpffs". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117718    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-cgroup" (OuterVolumeSpecName: "cgroup") pod "6a752659-ec3c-4841-8e83-fd916caaebc2" (UID: "6a752659-ec3c-4841-8e83-fd916caaebc2"). InnerVolumeSpecName "cgroup". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.121735    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a752659-ec3c-4841-8e83-fd916caaebc2-kube-api-access-r4bbk" (OuterVolumeSpecName: "kube-api-access-r4bbk") pod "6a752659-ec3c-4841-8e83-fd916caaebc2" (UID: "6a752659-ec3c-4841-8e83-fd916caaebc2"). InnerVolumeSpecName "kube-api-access-r4bbk". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217663    1517 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-r4bbk\" (UniqueName: \"kubernetes.io/projected/6a752659-ec3c-4841-8e83-fd916caaebc2-kube-api-access-r4bbk\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217713    1517 reconciler_common.go:288] "Volume detached for volume \"cgroup\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-cgroup\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217725    1517 reconciler_common.go:288] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-run\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217760    1517 reconciler_common.go:288] "Volume detached for volume \"modules\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-modules\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217774    1517 reconciler_common.go:288] "Volume detached for volume \"debugfs\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-debugfs\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217783    1517 reconciler_common.go:288] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-host\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217791    1517 reconciler_common.go:288] "Volume detached for volume \"bpffs\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-bpffs\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.420396    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:44:53 addons-451841 kubelet[1517]: I0916 10:44:53.525153    1517 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2" path="/var/lib/kubelet/pods/6a752659-ec3c-4841-8e83-fd916caaebc2/volumes"
	
	
	==> storage-provisioner [4ddb5fa614111a21d93d580947f3eb3b791d38fa6e497e66ae259ff6bb7fed15] <==
	I0916 10:30:56.265937       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:30:56.289948       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:30:56.290011       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:30:56.319402       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:30:56.319890       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"ef239dcc-ec3a-4a4d-b0db-6d9c8de888a1", APIVersion:"v1", ResourceVersion:"601", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157 became leader
	I0916 10:30:56.319948       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157!
	I0916 10:30:56.520389       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-451841 -n addons-451841
helpers_test.go:261: (dbg) Run:  kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (443.082µs)
helpers_test.go:263: kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestAddons/parallel/Ingress (2.38s)

                                                
                                    
x
+
TestAddons/parallel/MetricsServer (369.13s)

                                                
                                                
=== RUN   TestAddons/parallel/MetricsServer
=== PAUSE TestAddons/parallel/MetricsServer

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/MetricsServer
addons_test.go:409: metrics-server stabilized in 4.213806ms
addons_test.go:411: (dbg) TestAddons/parallel/MetricsServer: waiting 6m0s for pods matching "k8s-app=metrics-server" in namespace "kube-system" ...
helpers_test.go:344: "metrics-server-84c5f94fbc-q47pm" [2252baaa-acbc-43dd-b38e-e8cd59ac7825] Running
addons_test.go:411: (dbg) TestAddons/parallel/MetricsServer: k8s-app=metrics-server healthy within 6.003503218s
addons_test.go:417: (dbg) Run:  kubectl --context addons-451841 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-451841 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (485.223µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-451841 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-451841 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (355.263µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-451841 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-451841 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (453.24µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-451841 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-451841 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (534.249µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-451841 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-451841 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (405.002µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-451841 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-451841 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (596.575µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-451841 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-451841 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (555.524µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-451841 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-451841 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (515.197µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-451841 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-451841 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (491.697µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-451841 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-451841 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (487.742µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-451841 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-451841 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (651.131µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-451841 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-451841 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (608.628µs)
addons_test.go:417: (dbg) Run:  kubectl --context addons-451841 top pods -n kube-system
addons_test.go:417: (dbg) Non-zero exit: kubectl --context addons-451841 top pods -n kube-system: fork/exec /usr/local/bin/kubectl: exec format error (785.316µs)
addons_test.go:431: failed checking metric server: fork/exec /usr/local/bin/kubectl: exec format error
addons_test.go:434: (dbg) Run:  out/minikube-linux-arm64 -p addons-451841 addons disable metrics-server --alsologtostderr -v=1
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestAddons/parallel/MetricsServer]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect addons-451841
helpers_test.go:235: (dbg) docker inspect addons-451841:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4",
	        "Created": "2024-09-16T10:30:19.386072283Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2064804,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:30:19.514500967Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/hostname",
	        "HostsPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/hosts",
	        "LogPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4-json.log",
	        "Name": "/addons-451841",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "addons-451841:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "addons-451841",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/merged",
	                "UpperDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/diff",
	                "WorkDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "addons-451841",
	                "Source": "/var/lib/docker/volumes/addons-451841/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "addons-451841",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "addons-451841",
	                "name.minikube.sigs.k8s.io": "addons-451841",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "4da7b9dd4db914ae48304dba9ae2b2fb9dab68040bc986bf2751a778e62e4524",
	            "SandboxKey": "/var/run/docker/netns/4da7b9dd4db9",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40577"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40578"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40581"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40579"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40580"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "addons-451841": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "cd1315f485e3267c82ac80908081e901323e720ef1bb26de92d612c54dfd58d8",
	                    "EndpointID": "36f212e2a713c67d6c2ea54e50fbd0d8d7f7eb862ef913caa03a6cbfac71cb21",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "addons-451841",
	                        "8a213d4c4dec"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p addons-451841 -n addons-451841
helpers_test.go:244: <<< TestAddons/parallel/MetricsServer FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestAddons/parallel/MetricsServer]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p addons-451841 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p addons-451841 logs -n 25: (1.62719775s)
helpers_test.go:252: TestAddons/parallel/MetricsServer logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| Command |                 Args                 |        Profile         |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:28 UTC |                     |
	|         | -p download-only-911311              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0         |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:28 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-911311              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | -o=json --download-only              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | -p download-only-889126              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.31.1         |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-889126              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-911311              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-889126              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | --download-only -p                   | download-docker-956530 | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | download-docker-956530               |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | -p download-docker-956530            | download-docker-956530 | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | --download-only -p                   | binary-mirror-852743   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | binary-mirror-852743                 |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --binary-mirror                      |                        |         |         |                     |                     |
	|         | http://127.0.0.1:35351               |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | -p binary-mirror-852743              | binary-mirror-852743   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| addons  | disable dashboard -p                 | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| addons  | enable dashboard -p                  | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| start   | -p addons-451841 --wait=true         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:33 UTC |
	|         | --memory=4000 --alsologtostderr      |                        |         |         |                     |                     |
	|         | --addons=registry                    |                        |         |         |                     |                     |
	|         | --addons=metrics-server              |                        |         |         |                     |                     |
	|         | --addons=volumesnapshots             |                        |         |         |                     |                     |
	|         | --addons=csi-hostpath-driver         |                        |         |         |                     |                     |
	|         | --addons=gcp-auth                    |                        |         |         |                     |                     |
	|         | --addons=cloud-spanner               |                        |         |         |                     |                     |
	|         | --addons=inspektor-gadget            |                        |         |         |                     |                     |
	|         | --addons=storage-provisioner-rancher |                        |         |         |                     |                     |
	|         | --addons=nvidia-device-plugin        |                        |         |         |                     |                     |
	|         | --addons=yakd --addons=volcano       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --addons=ingress                     |                        |         |         |                     |                     |
	|         | --addons=ingress-dns                 |                        |         |         |                     |                     |
	| addons  | addons-451841 addons disable         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | yakd --alsologtostderr -v=1          |                        |         |         |                     |                     |
	| ip      | addons-451841 ip                     | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	| addons  | addons-451841 addons disable         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | registry --alsologtostderr           |                        |         |         |                     |                     |
	|         | -v=1                                 |                        |         |         |                     |                     |
	| addons  | disable nvidia-device-plugin         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | -p addons-451841                     |                        |         |         |                     |                     |
	| addons  | disable cloud-spanner -p             | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| addons  | enable headlamp                      | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | -p addons-451841                     |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1               |                        |         |         |                     |                     |
	| addons  | addons-451841 addons disable         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:39 UTC | 16 Sep 24 10:39 UTC |
	|         | headlamp --alsologtostderr           |                        |         |         |                     |                     |
	|         | -v=1                                 |                        |         |         |                     |                     |
	| addons  | disable inspektor-gadget -p          | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:44 UTC | 16 Sep 24 10:44 UTC |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| addons  | addons-451841 addons                 | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|         | disable metrics-server               |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1               |                        |         |         |                     |                     |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:29:55
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:29:55.756900 2064308 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:29:55.757118 2064308 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:29:55.757146 2064308 out.go:358] Setting ErrFile to fd 2...
	I0916 10:29:55.757164 2064308 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:29:55.757443 2064308 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:29:55.757918 2064308 out.go:352] Setting JSON to false
	I0916 10:29:55.758950 2064308 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":137538,"bootTime":1726345058,"procs":192,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:29:55.759050 2064308 start.go:139] virtualization:  
	I0916 10:29:55.762450 2064308 out.go:177] * [addons-451841] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:29:55.765218 2064308 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:29:55.765320 2064308 notify.go:220] Checking for updates...
	I0916 10:29:55.771607 2064308 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:29:55.774426 2064308 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:29:55.777761 2064308 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:29:55.780330 2064308 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:29:55.782904 2064308 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:29:55.785688 2064308 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:29:55.807382 2064308 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:29:55.807515 2064308 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:29:55.863178 2064308 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:49 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:29:55.853088898 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:29:55.863303 2064308 docker.go:318] overlay module found
	I0916 10:29:55.867792 2064308 out.go:177] * Using the docker driver based on user configuration
	I0916 10:29:55.870461 2064308 start.go:297] selected driver: docker
	I0916 10:29:55.870476 2064308 start.go:901] validating driver "docker" against <nil>
	I0916 10:29:55.870490 2064308 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:29:55.871367 2064308 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:29:55.922454 2064308 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:49 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:29:55.912678011 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:29:55.922666 2064308 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:29:55.922995 2064308 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:29:55.925501 2064308 out.go:177] * Using Docker driver with root privileges
	I0916 10:29:55.928402 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:29:55.928468 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:29:55.928481 2064308 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:29:55.928561 2064308 start.go:340] cluster config:
	{Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime
:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHA
uthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:29:55.931349 2064308 out.go:177] * Starting "addons-451841" primary control-plane node in "addons-451841" cluster
	I0916 10:29:55.933847 2064308 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:29:55.936549 2064308 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:29:55.939027 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:29:55.939075 2064308 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:29:55.939087 2064308 cache.go:56] Caching tarball of preloaded images
	I0916 10:29:55.939127 2064308 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:29:55.939172 2064308 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:29:55.939183 2064308 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:29:55.939554 2064308 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json ...
	I0916 10:29:55.939585 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json: {Name:mk4b86ccd0e04a15f77246bcc432382e6ef83bd3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:29:55.955829 2064308 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:29:55.955957 2064308 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:29:55.955999 2064308 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:29:55.956009 2064308 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:29:55.956017 2064308 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:29:55.956025 2064308 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:30:13.033213 2064308 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:30:13.033255 2064308 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:30:13.033286 2064308 start.go:360] acquireMachinesLock for addons-451841: {Name:mk3e70771a060125a26a792bbbf3ad5672ad97bd Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:30:13.033421 2064308 start.go:364] duration metric: took 111.614µs to acquireMachinesLock for "addons-451841"
	I0916 10:30:13.033454 2064308 start.go:93] Provisioning new machine with config: &{Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:min
ikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:fa
lse CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:30:13.033622 2064308 start.go:125] createHost starting for "" (driver="docker")
	I0916 10:30:13.035916 2064308 out.go:235] * Creating docker container (CPUs=2, Memory=4000MB) ...
	I0916 10:30:13.036188 2064308 start.go:159] libmachine.API.Create for "addons-451841" (driver="docker")
	I0916 10:30:13.036228 2064308 client.go:168] LocalClient.Create starting
	I0916 10:30:13.036363 2064308 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 10:30:13.386329 2064308 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 10:30:13.561829 2064308 cli_runner.go:164] Run: docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 10:30:13.576129 2064308 cli_runner.go:211] docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 10:30:13.576212 2064308 network_create.go:284] running [docker network inspect addons-451841] to gather additional debugging logs...
	I0916 10:30:13.576235 2064308 cli_runner.go:164] Run: docker network inspect addons-451841
	W0916 10:30:13.591552 2064308 cli_runner.go:211] docker network inspect addons-451841 returned with exit code 1
	I0916 10:30:13.591606 2064308 network_create.go:287] error running [docker network inspect addons-451841]: docker network inspect addons-451841: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network addons-451841 not found
	I0916 10:30:13.591621 2064308 network_create.go:289] output of [docker network inspect addons-451841]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network addons-451841 not found
	
	** /stderr **
	I0916 10:30:13.591720 2064308 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:30:13.608306 2064308 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001aeacb0}
	I0916 10:30:13.608356 2064308 network_create.go:124] attempt to create docker network addons-451841 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0916 10:30:13.608420 2064308 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-451841 addons-451841
	I0916 10:30:13.683378 2064308 network_create.go:108] docker network addons-451841 192.168.49.0/24 created
	I0916 10:30:13.683411 2064308 kic.go:121] calculated static IP "192.168.49.2" for the "addons-451841" container
	I0916 10:30:13.683492 2064308 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:30:13.700184 2064308 cli_runner.go:164] Run: docker volume create addons-451841 --label name.minikube.sigs.k8s.io=addons-451841 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:30:13.718068 2064308 oci.go:103] Successfully created a docker volume addons-451841
	I0916 10:30:13.718179 2064308 cli_runner.go:164] Run: docker run --rm --name addons-451841-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --entrypoint /usr/bin/test -v addons-451841:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:30:15.421383 2064308 cli_runner.go:217] Completed: docker run --rm --name addons-451841-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --entrypoint /usr/bin/test -v addons-451841:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib: (1.703150378s)
	I0916 10:30:15.421417 2064308 oci.go:107] Successfully prepared a docker volume addons-451841
	I0916 10:30:15.421439 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:30:15.421458 2064308 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:30:15.421522 2064308 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-451841:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:30:19.320511 2064308 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-451841:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (3.898937148s)
	I0916 10:30:19.320548 2064308 kic.go:203] duration metric: took 3.899086612s to extract preloaded images to volume ...
	W0916 10:30:19.320695 2064308 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:30:19.320803 2064308 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:30:19.371670 2064308 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-451841 --name addons-451841 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-451841 --network addons-451841 --ip 192.168.49.2 --volume addons-451841:/var --security-opt apparmor=unconfined --memory=4000mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:30:19.674459 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Running}}
	I0916 10:30:19.700795 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:19.725169 2064308 cli_runner.go:164] Run: docker exec addons-451841 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:30:19.777409 2064308 oci.go:144] the created container "addons-451841" has a running status.
	I0916 10:30:19.777438 2064308 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa...
	I0916 10:30:20.426549 2064308 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:30:20.459111 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:20.485764 2064308 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:30:20.485788 2064308 kic_runner.go:114] Args: [docker exec --privileged addons-451841 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:30:20.553044 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:20.584488 2064308 machine.go:93] provisionDockerMachine start ...
	I0916 10:30:20.584585 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.604705 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.605002 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.605024 2064308 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:30:20.750295 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-451841
	
	I0916 10:30:20.750323 2064308 ubuntu.go:169] provisioning hostname "addons-451841"
	I0916 10:30:20.750394 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.772671 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.772910 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.772922 2064308 main.go:141] libmachine: About to run SSH command:
	sudo hostname addons-451841 && echo "addons-451841" | sudo tee /etc/hostname
	I0916 10:30:20.923316 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-451841
	
	I0916 10:30:20.923448 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.940021 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.940274 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.940298 2064308 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\saddons-451841' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-451841/g' /etc/hosts;
				else 
					echo '127.0.1.1 addons-451841' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:30:21.087110 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:30:21.087184 2064308 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:30:21.087263 2064308 ubuntu.go:177] setting up certificates
	I0916 10:30:21.087293 2064308 provision.go:84] configureAuth start
	I0916 10:30:21.087450 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.105254 2064308 provision.go:143] copyHostCerts
	I0916 10:30:21.105342 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:30:21.105468 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:30:21.105537 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:30:21.105585 2064308 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.addons-451841 san=[127.0.0.1 192.168.49.2 addons-451841 localhost minikube]
	I0916 10:30:21.497343 2064308 provision.go:177] copyRemoteCerts
	I0916 10:30:21.497413 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:30:21.497456 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.514957 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.611658 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:30:21.636890 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:30:21.662172 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:30:21.686808 2064308 provision.go:87] duration metric: took 599.477164ms to configureAuth
	I0916 10:30:21.686873 2064308 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:30:21.687116 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:21.687133 2064308 machine.go:96] duration metric: took 1.102625588s to provisionDockerMachine
	I0916 10:30:21.687141 2064308 client.go:171] duration metric: took 8.650903893s to LocalClient.Create
	I0916 10:30:21.687161 2064308 start.go:167] duration metric: took 8.650974974s to libmachine.API.Create "addons-451841"
	I0916 10:30:21.687171 2064308 start.go:293] postStartSetup for "addons-451841" (driver="docker")
	I0916 10:30:21.687182 2064308 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:30:21.687249 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:30:21.687299 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.706431 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.804065 2064308 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:30:21.807409 2064308 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:30:21.807450 2064308 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:30:21.807462 2064308 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:30:21.807470 2064308 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:30:21.807482 2064308 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:30:21.807551 2064308 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:30:21.807581 2064308 start.go:296] duration metric: took 120.403063ms for postStartSetup
	I0916 10:30:21.807904 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.824820 2064308 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json ...
	I0916 10:30:21.825120 2064308 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:30:21.825171 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.841557 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.935711 2064308 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:30:21.940289 2064308 start.go:128] duration metric: took 8.906649729s to createHost
	I0916 10:30:21.940328 2064308 start.go:83] releasing machines lock for "addons-451841", held for 8.906892895s
	I0916 10:30:21.940401 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.957512 2064308 ssh_runner.go:195] Run: cat /version.json
	I0916 10:30:21.957582 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.957842 2064308 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:30:21.957901 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.986070 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.992358 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:22.209233 2064308 ssh_runner.go:195] Run: systemctl --version
	I0916 10:30:22.213896 2064308 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:30:22.218111 2064308 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:30:22.243931 2064308 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:30:22.244032 2064308 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:30:22.274074 2064308 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:30:22.274104 2064308 start.go:495] detecting cgroup driver to use...
	I0916 10:30:22.274139 2064308 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:30:22.274194 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:30:22.287113 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:30:22.299302 2064308 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:30:22.299412 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:30:22.313515 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:30:22.327839 2064308 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:30:22.409410 2064308 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:30:22.494962 2064308 docker.go:233] disabling docker service ...
	I0916 10:30:22.495100 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:30:22.515205 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:30:22.527495 2064308 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:30:22.611444 2064308 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:30:22.705471 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:30:22.717496 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:30:22.735435 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:30:22.746124 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:30:22.757226 2064308 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:30:22.757299 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:30:22.767541 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:30:22.779039 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:30:22.788821 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:30:22.799244 2064308 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:30:22.808704 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:30:22.820713 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:30:22.831851 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:30:22.842394 2064308 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:30:22.851545 2064308 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:30:22.860424 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:22.961475 2064308 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:30:23.100987 2064308 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:30:23.101138 2064308 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:30:23.105001 2064308 start.go:563] Will wait 60s for crictl version
	I0916 10:30:23.105079 2064308 ssh_runner.go:195] Run: which crictl
	I0916 10:30:23.108696 2064308 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:30:23.154724 2064308 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:30:23.154812 2064308 ssh_runner.go:195] Run: containerd --version
	I0916 10:30:23.179902 2064308 ssh_runner.go:195] Run: containerd --version
	I0916 10:30:23.208730 2064308 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:30:23.210246 2064308 cli_runner.go:164] Run: docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:30:23.225302 2064308 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:30:23.229071 2064308 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:30:23.240048 2064308 kubeadm.go:883] updating cluster {Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNa
mes:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false Cus
tomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:30:23.240172 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:30:23.240246 2064308 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:30:23.276242 2064308 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:30:23.276266 2064308 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:30:23.276331 2064308 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:30:23.312895 2064308 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:30:23.312924 2064308 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:30:23.312933 2064308 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 containerd true true} ...
	I0916 10:30:23.313028 2064308 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=addons-451841 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:30:23.313095 2064308 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:30:23.348552 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:30:23.348577 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:30:23.348587 2064308 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:30:23.348609 2064308 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-451841 NodeName:addons-451841 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc
/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:30:23.348742 2064308 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "addons-451841"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:30:23.348817 2064308 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:30:23.357634 2064308 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:30:23.357705 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:30:23.366468 2064308 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:30:23.385942 2064308 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:30:23.404422 2064308 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2167 bytes)
	I0916 10:30:23.422831 2064308 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:30:23.426382 2064308 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:30:23.437337 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:23.533359 2064308 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:30:23.547523 2064308 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841 for IP: 192.168.49.2
	I0916 10:30:23.547546 2064308 certs.go:194] generating shared ca certs ...
	I0916 10:30:23.547562 2064308 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:23.548238 2064308 certs.go:240] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:30:24.056004 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt ...
	I0916 10:30:24.056043 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt: {Name:mk8fa0c4ced40ca68ac874100ce374f588dfea0b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.056261 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key ...
	I0916 10:30:24.056276 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key: {Name:mk04aab579c9f6bfd22c8de7442d64e7264cf4f3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.056381 2064308 certs.go:240] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:30:24.923761 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt ...
	I0916 10:30:24.923793 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt: {Name:mke93617c0d085600c816f9e0c290a24fbe662eb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.923996 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key ...
	I0916 10:30:24.924009 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key: {Name:mk45200538cf11f718e98e7cfef8cbfcd0dafedf Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.924099 2064308 certs.go:256] generating profile certs ...
	I0916 10:30:24.924161 2064308 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key
	I0916 10:30:24.924189 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt with IP's: []
	I0916 10:30:25.053524 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt ...
	I0916 10:30:25.053557 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: {Name:mk37fa0b7d204f82c8af039a0f580deae8708ef5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.053750 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key ...
	I0916 10:30:25.053764 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key: {Name:mkdb13343be22c0a0f72ff55f3a3cbca00768e68 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.053853 2064308 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707
	I0916 10:30:25.053877 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2]
	I0916 10:30:25.726904 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 ...
	I0916 10:30:25.726937 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707: {Name:mkf1dd897eefb9f7916ec8408e62b2271e638207 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.727141 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707 ...
	I0916 10:30:25.727156 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707: {Name:mkfbc7b493bc2e7d0b9e7f941111c820f07e3e82 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.727261 2064308 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt
	I0916 10:30:25.727361 2064308 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key
	I0916 10:30:25.727418 2064308 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key
	I0916 10:30:25.727439 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt with IP's: []
	I0916 10:30:26.011801 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt ...
	I0916 10:30:26.011842 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt: {Name:mkb21e9e32e986ac8dbc5fbe6c0db427fdb116ee Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:26.012049 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key ...
	I0916 10:30:26.012065 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key: {Name:mk95b366411d26459b0f1e143cac6384a51d5dfb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:26.012320 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:30:26.012368 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:30:26.012401 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:30:26.012429 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:30:26.013083 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:30:26.039152 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:30:26.064366 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:30:26.093086 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:30:26.116868 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
	I0916 10:30:26.141663 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:30:26.166725 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:30:26.191142 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:30:26.214975 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:30:26.238979 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:30:26.256459 2064308 ssh_runner.go:195] Run: openssl version
	I0916 10:30:26.262089 2064308 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:30:26.271478 2064308 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.274966 2064308 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.275035 2064308 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.281888 2064308 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:30:26.291290 2064308 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:30:26.294471 2064308 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:30:26.294534 2064308 kubeadm.go:392] StartCluster: {Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames
:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false Custom
QemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:30:26.294629 2064308 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:30:26.294715 2064308 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:30:26.332648 2064308 cri.go:89] found id: ""
	I0916 10:30:26.332740 2064308 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:30:26.341585 2064308 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:30:26.350524 2064308 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 10:30:26.350588 2064308 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:30:26.359218 2064308 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 10:30:26.359240 2064308 kubeadm.go:157] found existing configuration files:
	
	I0916 10:30:26.359319 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 10:30:26.368227 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 10:30:26.368297 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 10:30:26.377781 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 10:30:26.386494 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 10:30:26.386567 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 10:30:26.394932 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 10:30:26.403622 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 10:30:26.403687 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:30:26.412005 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 10:30:26.420862 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 10:30:26.420957 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:30:26.429543 2064308 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 10:30:26.471767 2064308 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 10:30:26.472019 2064308 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 10:30:26.498827 2064308 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 10:30:26.498904 2064308 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 10:30:26.498947 2064308 kubeadm.go:310] OS: Linux
	I0916 10:30:26.498998 2064308 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 10:30:26.499052 2064308 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 10:30:26.499103 2064308 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 10:30:26.499154 2064308 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 10:30:26.499218 2064308 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 10:30:26.499270 2064308 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 10:30:26.499320 2064308 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 10:30:26.499375 2064308 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 10:30:26.499426 2064308 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 10:30:26.577650 2064308 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 10:30:26.577762 2064308 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 10:30:26.577859 2064308 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 10:30:26.583045 2064308 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 10:30:26.586527 2064308 out.go:235]   - Generating certificates and keys ...
	I0916 10:30:26.586988 2064308 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 10:30:26.587103 2064308 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 10:30:26.754645 2064308 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 10:30:27.554793 2064308 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 10:30:28.039725 2064308 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 10:30:28.690015 2064308 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 10:30:29.764620 2064308 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 10:30:29.764907 2064308 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [addons-451841 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:30:30.341274 2064308 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 10:30:30.342274 2064308 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [addons-451841 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:30:30.576739 2064308 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 10:30:31.765912 2064308 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 10:30:33.601844 2064308 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 10:30:33.602129 2064308 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 10:30:34.584274 2064308 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 10:30:35.213888 2064308 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 10:30:35.990415 2064308 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 10:30:36.165269 2064308 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 10:30:36.564139 2064308 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 10:30:36.565009 2064308 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 10:30:36.568128 2064308 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 10:30:36.570826 2064308 out.go:235]   - Booting up control plane ...
	I0916 10:30:36.570944 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 10:30:36.571026 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 10:30:36.571834 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 10:30:36.583080 2064308 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 10:30:36.589082 2064308 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 10:30:36.589162 2064308 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 10:30:36.685676 2064308 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 10:30:36.685796 2064308 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 10:30:37.686643 2064308 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00127007s
	I0916 10:30:37.686760 2064308 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 10:30:44.689772 2064308 kubeadm.go:310] [api-check] The API server is healthy after 7.003101119s
	I0916 10:30:44.709044 2064308 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 10:30:44.727931 2064308 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 10:30:44.754458 2064308 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 10:30:44.754737 2064308 kubeadm.go:310] [mark-control-plane] Marking the node addons-451841 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 10:30:44.766739 2064308 kubeadm.go:310] [bootstrap-token] Using token: dx9pov.rexyyitopznv0w4v
	I0916 10:30:44.769416 2064308 out.go:235]   - Configuring RBAC rules ...
	I0916 10:30:44.769548 2064308 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 10:30:44.776785 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 10:30:44.785617 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 10:30:44.789704 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 10:30:44.794016 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 10:30:44.798127 2064308 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 10:30:45.099673 2064308 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 10:30:45.534575 2064308 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 10:30:46.098271 2064308 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 10:30:46.099422 2064308 kubeadm.go:310] 
	I0916 10:30:46.099510 2064308 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 10:30:46.099519 2064308 kubeadm.go:310] 
	I0916 10:30:46.099624 2064308 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 10:30:46.099640 2064308 kubeadm.go:310] 
	I0916 10:30:46.099673 2064308 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 10:30:46.099733 2064308 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 10:30:46.099783 2064308 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 10:30:46.099787 2064308 kubeadm.go:310] 
	I0916 10:30:46.099841 2064308 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 10:30:46.099846 2064308 kubeadm.go:310] 
	I0916 10:30:46.099898 2064308 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 10:30:46.099903 2064308 kubeadm.go:310] 
	I0916 10:30:46.099959 2064308 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 10:30:46.100036 2064308 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 10:30:46.100108 2064308 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 10:30:46.100113 2064308 kubeadm.go:310] 
	I0916 10:30:46.100201 2064308 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 10:30:46.100280 2064308 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 10:30:46.100285 2064308 kubeadm.go:310] 
	I0916 10:30:46.100377 2064308 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token dx9pov.rexyyitopznv0w4v \
	I0916 10:30:46.100482 2064308 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 10:30:46.100503 2064308 kubeadm.go:310] 	--control-plane 
	I0916 10:30:46.100507 2064308 kubeadm.go:310] 
	I0916 10:30:46.100599 2064308 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 10:30:46.100604 2064308 kubeadm.go:310] 
	I0916 10:30:46.100684 2064308 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token dx9pov.rexyyitopznv0w4v \
	I0916 10:30:46.100792 2064308 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 10:30:46.104209 2064308 kubeadm.go:310] W0916 10:30:26.468492    1025 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:30:46.104508 2064308 kubeadm.go:310] W0916 10:30:26.469422    1025 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:30:46.104733 2064308 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 10:30:46.104841 2064308 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 10:30:46.104863 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:30:46.104872 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:30:46.107753 2064308 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:30:46.110419 2064308 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:30:46.114304 2064308 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:30:46.114327 2064308 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:30:46.132060 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 10:30:46.405649 2064308 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:30:46.405772 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:46.405844 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-451841 minikube.k8s.io/updated_at=2024_09_16T10_30_46_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=addons-451841 minikube.k8s.io/primary=true
	I0916 10:30:46.544610 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:46.544668 2064308 ops.go:34] apiserver oom_adj: -16
	I0916 10:30:47.045343 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:47.544713 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:48.045593 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:48.545262 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:49.044804 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:49.545373 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:50.045197 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:50.206616 2064308 kubeadm.go:1113] duration metric: took 3.800886781s to wait for elevateKubeSystemPrivileges
	I0916 10:30:50.206650 2064308 kubeadm.go:394] duration metric: took 23.912135022s to StartCluster
	I0916 10:30:50.206760 2064308 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:50.206888 2064308 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:30:50.207291 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:50.207495 2064308 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:30:50.207664 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 10:30:50.207912 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:50.207954 2064308 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
	I0916 10:30:50.208037 2064308 addons.go:69] Setting yakd=true in profile "addons-451841"
	I0916 10:30:50.208056 2064308 addons.go:234] Setting addon yakd=true in "addons-451841"
	I0916 10:30:50.208079 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.208590 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.209289 2064308 addons.go:69] Setting metrics-server=true in profile "addons-451841"
	I0916 10:30:50.209312 2064308 addons.go:234] Setting addon metrics-server=true in "addons-451841"
	I0916 10:30:50.209362 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.209903 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.210207 2064308 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-451841"
	I0916 10:30:50.210240 2064308 addons.go:234] Setting addon nvidia-device-plugin=true in "addons-451841"
	I0916 10:30:50.210263 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.210767 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.218758 2064308 addons.go:69] Setting registry=true in profile "addons-451841"
	I0916 10:30:50.218798 2064308 addons.go:234] Setting addon registry=true in "addons-451841"
	I0916 10:30:50.218832 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.219427 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.219602 2064308 addons.go:69] Setting cloud-spanner=true in profile "addons-451841"
	I0916 10:30:50.219647 2064308 addons.go:234] Setting addon cloud-spanner=true in "addons-451841"
	I0916 10:30:50.219685 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.221722 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.222266 2064308 addons.go:69] Setting storage-provisioner=true in profile "addons-451841"
	I0916 10:30:50.222288 2064308 addons.go:234] Setting addon storage-provisioner=true in "addons-451841"
	I0916 10:30:50.222314 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.222854 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.243982 2064308 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-451841"
	I0916 10:30:50.244056 2064308 addons.go:234] Setting addon csi-hostpath-driver=true in "addons-451841"
	I0916 10:30:50.244103 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.244878 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.250996 2064308 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-451841"
	I0916 10:30:50.251033 2064308 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-451841"
	I0916 10:30:50.251403 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.262479 2064308 addons.go:69] Setting volcano=true in profile "addons-451841"
	I0916 10:30:50.262526 2064308 addons.go:234] Setting addon volcano=true in "addons-451841"
	I0916 10:30:50.262567 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.263124 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.310432 2064308 addons.go:69] Setting default-storageclass=true in profile "addons-451841"
	I0916 10:30:50.310537 2064308 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-451841"
	I0916 10:30:50.311117 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.312245 2064308 addons.go:69] Setting volumesnapshots=true in profile "addons-451841"
	I0916 10:30:50.312377 2064308 addons.go:234] Setting addon volumesnapshots=true in "addons-451841"
	I0916 10:30:50.312448 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.313757 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.327865 2064308 addons.go:69] Setting gcp-auth=true in profile "addons-451841"
	I0916 10:30:50.327962 2064308 mustload.go:65] Loading cluster: addons-451841
	I0916 10:30:50.330380 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:50.330866 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.331146 2064308 out.go:177] * Verifying Kubernetes components...
	I0916 10:30:50.334941 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:50.349812 2064308 addons.go:69] Setting ingress=true in profile "addons-451841"
	I0916 10:30:50.349850 2064308 addons.go:234] Setting addon ingress=true in "addons-451841"
	I0916 10:30:50.349897 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.350438 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.350648 2064308 addons.go:234] Setting addon storage-provisioner-rancher=true in "addons-451841"
	I0916 10:30:50.350723 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.351151 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.369853 2064308 addons.go:69] Setting ingress-dns=true in profile "addons-451841"
	I0916 10:30:50.369893 2064308 addons.go:234] Setting addon ingress-dns=true in "addons-451841"
	I0916 10:30:50.369937 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.370407 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.375867 2064308 out.go:177]   - Using image docker.io/registry:2.8.3
	I0916 10:30:50.382808 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.6
	I0916 10:30:50.384082 2064308 addons.go:69] Setting inspektor-gadget=true in profile "addons-451841"
	I0916 10:30:50.384111 2064308 addons.go:234] Setting addon inspektor-gadget=true in "addons-451841"
	I0916 10:30:50.384143 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.384714 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.421644 2064308 out.go:177]   - Using image registry.k8s.io/metrics-server/metrics-server:v0.7.2
	I0916 10:30:50.424401 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0916 10:30:50.424443 2064308 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0916 10:30:50.424517 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.438309 2064308 out.go:177]   - Using image nvcr.io/nvidia/k8s-device-plugin:v0.16.2
	I0916 10:30:50.438567 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-rc.yaml
	I0916 10:30:50.438585 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (860 bytes)
	I0916 10:30:50.438646 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.439842 2064308 out.go:177]   - Using image docker.io/marcnuri/yakd:0.0.5
	I0916 10:30:50.440236 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-webhook-manager:v1.9.0
	I0916 10:30:50.440402 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.462370 2064308 addons.go:234] Setting addon default-storageclass=true in "addons-451841"
	I0916 10:30:50.462409 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.463191 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.463463 2064308 addons.go:431] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:30:50.466889 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
	I0916 10:30:50.467021 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.474834 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-ns.yaml
	I0916 10:30:50.474857 2064308 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
	I0916 10:30:50.474919 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.484574 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:30:50.485713 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
	I0916 10:30:50.508488 2064308 out.go:177]   - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.23
	I0916 10:30:50.525937 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-controller-manager:v1.9.0
	I0916 10:30:50.526169 2064308 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:30:50.526183 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:30:50.526247 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.542222 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
	I0916 10:30:50.543269 2064308 addons.go:431] installing /etc/kubernetes/addons/deployment.yaml
	I0916 10:30:50.543418 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
	I0916 10:30:50.543483 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.563839 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-scheduler:v1.9.0
	I0916 10:30:50.567954 2064308 addons.go:431] installing /etc/kubernetes/addons/volcano-deployment.yaml
	I0916 10:30:50.567983 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volcano-deployment.yaml (434001 bytes)
	I0916 10:30:50.568053 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.583888 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.587279 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/minikube-ingress-dns:0.0.3
	I0916 10:30:50.587486 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/controller:v1.11.2
	I0916 10:30:50.589757 2064308 out.go:177]   - Using image docker.io/busybox:stable
	I0916 10:30:50.589894 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
	I0916 10:30:50.592333 2064308 addons.go:431] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:30:50.592357 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2442 bytes)
	I0916 10:30:50.592588 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.594469 2064308 out.go:177]   - Using image docker.io/rancher/local-path-provisioner:v0.0.22
	I0916 10:30:50.594639 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:30:50.596571 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
	I0916 10:30:50.596784 2064308 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:30:50.596798 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
	I0916 10:30:50.596863 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.628847 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:30:50.631659 2064308 addons.go:431] installing /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:30:50.631684 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
	I0916 10:30:50.631748 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.645470 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.650239 2064308 out.go:177]   - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0
	I0916 10:30:50.650364 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
	I0916 10:30:50.650401 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
	I0916 10:30:50.652227 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.653039 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-namespace.yaml
	I0916 10:30:50.654718 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-namespace.yaml --> /etc/kubernetes/addons/ig-namespace.yaml (55 bytes)
	I0916 10:30:50.654790 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.661463 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
	I0916 10:30:50.661708 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.654527 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
	I0916 10:30:50.662209 2064308 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
	I0916 10:30:50.662349 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.674173 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
	I0916 10:30:50.676994 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
	I0916 10:30:50.680275 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
	I0916 10:30:50.680305 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
	I0916 10:30:50.680378 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.691037 2064308 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:30:50.691057 2064308 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:30:50.691123 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.774282 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.780046 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.807312 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.827826 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.831006 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.853363 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.867169 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.875051 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.875081 2064308 retry.go:31] will retry after 209.079202ms: ssh: handshake failed: EOF
	I0916 10:30:50.875514 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.878034 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.878076 2064308 retry.go:31] will retry after 358.329045ms: ssh: handshake failed: EOF
	I0916 10:30:50.878970 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.891671 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.913115 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.913144 2064308 retry.go:31] will retry after 291.220359ms: ssh: handshake failed: EOF
	W0916 10:30:51.085514 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:51.085558 2064308 retry.go:31] will retry after 406.090408ms: ssh: handshake failed: EOF
	I0916 10:30:51.380959 2064308 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml": (1.173254923s)
	I0916 10:30:51.381043 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:30:51.381158 2064308 ssh_runner.go:235] Completed: sudo systemctl daemon-reload: (1.046146925s)
	I0916 10:30:51.381191 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 10:30:51.381193 2064308 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:30:51.393457 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-serviceaccount.yaml
	I0916 10:30:51.393478 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-serviceaccount.yaml --> /etc/kubernetes/addons/ig-serviceaccount.yaml (80 bytes)
	I0916 10:30:51.405074 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:30:51.536141 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:30:51.553523 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-hostpath.yaml
	I0916 10:30:51.553553 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
	I0916 10:30:51.664299 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-role.yaml
	I0916 10:30:51.664331 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-role.yaml --> /etc/kubernetes/addons/ig-role.yaml (210 bytes)
	I0916 10:30:51.694553 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-sa.yaml
	I0916 10:30:51.694580 2064308 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
	I0916 10:30:51.695380 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:30:51.703369 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-svc.yaml
	I0916 10:30:51.703394 2064308 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
	I0916 10:30:51.711436 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0916 10:30:51.711460 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
	I0916 10:30:51.716209 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml
	I0916 10:30:51.833745 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
	I0916 10:30:51.872114 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0916 10:30:51.872156 2064308 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0916 10:30:51.879573 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
	I0916 10:30:51.879603 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
	I0916 10:30:51.894115 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:30:51.927534 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-rolebinding.yaml
	I0916 10:30:51.927573 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-rolebinding.yaml --> /etc/kubernetes/addons/ig-rolebinding.yaml (244 bytes)
	I0916 10:30:51.967967 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-crb.yaml
	I0916 10:30:51.967997 2064308 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
	I0916 10:30:51.987647 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:30:51.987672 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
	I0916 10:30:52.018799 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
	I0916 10:30:52.018835 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
	I0916 10:30:52.040829 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:30:52.040863 2064308 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	I0916 10:30:52.062309 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
	I0916 10:30:52.062358 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
	I0916 10:30:52.141020 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrole.yaml
	I0916 10:30:52.141055 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrole.yaml --> /etc/kubernetes/addons/ig-clusterrole.yaml (1485 bytes)
	I0916 10:30:52.150339 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:30:52.156143 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-svc.yaml
	I0916 10:30:52.156182 2064308 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
	I0916 10:30:52.267727 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
	I0916 10:30:52.267754 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
	I0916 10:30:52.278358 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
	I0916 10:30:52.278410 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
	I0916 10:30:52.295260 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:30:52.315396 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:30:52.396988 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrolebinding.yaml
	I0916 10:30:52.397029 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrolebinding.yaml --> /etc/kubernetes/addons/ig-clusterrolebinding.yaml (274 bytes)
	I0916 10:30:52.414014 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:30:52.414040 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
	I0916 10:30:52.514419 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
	I0916 10:30:52.514447 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
	I0916 10:30:52.534199 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
	I0916 10:30:52.534239 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
	I0916 10:30:52.671597 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (1.290515457s)
	I0916 10:30:52.715081 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-crd.yaml
	I0916 10:30:52.715111 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-crd.yaml --> /etc/kubernetes/addons/ig-crd.yaml (5216 bytes)
	I0916 10:30:52.719191 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:30:52.832284 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
	I0916 10:30:52.832313 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
	I0916 10:30:52.924488 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
	I0916 10:30:52.924521 2064308 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
	I0916 10:30:53.168769 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:30:53.168802 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-daemonset.yaml (7735 bytes)
	I0916 10:30:53.177620 2064308 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (1.796357981s)
	I0916 10:30:53.177657 2064308 start.go:971] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
	I0916 10:30:53.177732 2064308 ssh_runner.go:235] Completed: sudo systemctl start kubelet: (1.79643144s)
	I0916 10:30:53.179507 2064308 node_ready.go:35] waiting up to 6m0s for node "addons-451841" to be "Ready" ...
	I0916 10:30:53.184404 2064308 node_ready.go:49] node "addons-451841" has status "Ready":"True"
	I0916 10:30:53.184443 2064308 node_ready.go:38] duration metric: took 4.710029ms for node "addons-451841" to be "Ready" ...
	I0916 10:30:53.184458 2064308 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:30:53.197525 2064308 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace to be "Ready" ...
	I0916 10:30:53.269899 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
	I0916 10:30:53.269941 2064308 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
	I0916 10:30:53.282557 2064308 addons.go:431] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:30:53.282590 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
	I0916 10:30:53.466493 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
	I0916 10:30:53.466519 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
	I0916 10:30:53.471643 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:30:53.602578 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:30:53.683185 2064308 kapi.go:214] "coredns" deployment in "kube-system" namespace and "addons-451841" context rescaled to 1 replicas
	I0916 10:30:53.701295 2064308 pod_ready.go:98] error getting pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bvmss" not found
	I0916 10:30:53.701323 2064308 pod_ready.go:82] duration metric: took 503.765362ms for pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace to be "Ready" ...
	E0916 10:30:53.701335 2064308 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bvmss" not found
	I0916 10:30:53.701342 2064308 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace to be "Ready" ...
	I0916 10:30:53.722187 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
	I0916 10:30:53.722214 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
	I0916 10:30:54.162813 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:30:54.162856 2064308 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
	I0916 10:30:54.507449 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:30:55.304651 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (3.899501244s)
	I0916 10:30:55.634996 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (3.939582165s)
	I0916 10:30:55.635110 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (4.098941534s)
	I0916 10:30:55.711983 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:30:57.666996 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
	I0916 10:30:57.667089 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:57.696419 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:57.712916 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:30:58.304674 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
	I0916 10:30:58.423037 2064308 addons.go:234] Setting addon gcp-auth=true in "addons-451841"
	I0916 10:30:58.423145 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:58.423647 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:58.453963 2064308 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
	I0916 10:30:58.454022 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:58.488418 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:59.724111 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:01.085964 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml: (9.369716418s)
	I0916 10:31:01.086088 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (9.252309206s)
	I0916 10:31:01.086143 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (9.191998071s)
	I0916 10:31:01.086179 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (8.93580956s)
	I0916 10:31:01.086966 2064308 addons.go:475] Verifying addon registry=true in "addons-451841"
	I0916 10:31:01.086280 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (8.790993869s)
	I0916 10:31:01.087161 2064308 addons.go:475] Verifying addon ingress=true in "addons-451841"
	I0916 10:31:01.086364 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (8.367136002s)
	I0916 10:31:01.086423 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml: (7.614752608s)
	I0916 10:31:01.086494 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (7.483872887s)
	I0916 10:31:01.086607 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (8.771185269s)
	I0916 10:31:01.087690 2064308 addons.go:475] Verifying addon metrics-server=true in "addons-451841"
	W0916 10:31:01.087784 2064308 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:31:01.087807 2064308 retry.go:31] will retry after 241.995667ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:31:01.089709 2064308 out.go:177] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
	
		minikube -p addons-451841 service yakd-dashboard -n yakd-dashboard
	
	I0916 10:31:01.089714 2064308 out.go:177] * Verifying ingress addon...
	I0916 10:31:01.089782 2064308 out.go:177] * Verifying registry addon...
	I0916 10:31:01.092615 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
	I0916 10:31:01.093670 2064308 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
	I0916 10:31:01.146629 2064308 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
	I0916 10:31:01.146661 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:01.147988 2064308 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
	I0916 10:31:01.148013 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:01.330778 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:31:01.607432 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:01.608116 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:01.783267 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (7.275754054s)
	I0916 10:31:01.783417 2064308 addons.go:475] Verifying addon csi-hostpath-driver=true in "addons-451841"
	I0916 10:31:01.783367 2064308 ssh_runner.go:235] Completed: cat /var/lib/minikube/google_application_credentials.json: (3.329378043s)
	I0916 10:31:01.785766 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:31:01.785796 2064308 out.go:177] * Verifying csi-hostpath-driver addon...
	I0916 10:31:01.788664 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.2
	I0916 10:31:01.789894 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
	I0916 10:31:01.794958 2064308 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0916 10:31:01.795006 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:01.797295 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
	I0916 10:31:01.797332 2064308 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
	I0916 10:31:01.893997 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-service.yaml
	I0916 10:31:01.894071 2064308 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
	I0916 10:31:01.937742 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:31:01.937810 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
	I0916 10:31:01.987240 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:31:02.097286 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:02.100875 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:02.209340 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:02.305635 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:02.597370 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:02.599723 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:02.795942 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:03.100397 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:03.103196 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:03.312002 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:03.374850 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml: (1.387566006s)
	I0916 10:31:03.375988 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (2.045090366s)
	I0916 10:31:03.378127 2064308 addons.go:475] Verifying addon gcp-auth=true in "addons-451841"
	I0916 10:31:03.381777 2064308 out.go:177] * Verifying gcp-auth addon...
	I0916 10:31:03.384298 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
	I0916 10:31:03.406084 2064308 kapi.go:86] Found 0 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:31:03.599867 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:03.600481 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:03.720241 2064308 pod_ready.go:93] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.720276 2064308 pod_ready.go:82] duration metric: took 10.018926311s for pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.720289 2064308 pod_ready.go:79] waiting up to 6m0s for pod "etcd-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.746042 2064308 pod_ready.go:93] pod "etcd-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.746067 2064308 pod_ready.go:82] duration metric: took 25.771231ms for pod "etcd-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.746081 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.752533 2064308 pod_ready.go:93] pod "kube-apiserver-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.752559 2064308 pod_ready.go:82] duration metric: took 6.470582ms for pod "kube-apiserver-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.752571 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.758462 2064308 pod_ready.go:93] pod "kube-controller-manager-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.758495 2064308 pod_ready.go:82] duration metric: took 5.916018ms for pod "kube-controller-manager-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.758507 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tltkn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.765336 2064308 pod_ready.go:93] pod "kube-proxy-tltkn" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.765369 2064308 pod_ready.go:82] duration metric: took 6.854119ms for pod "kube-proxy-tltkn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.765382 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.795811 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:04.099344 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:04.100673 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:04.104903 2064308 pod_ready.go:93] pod "kube-scheduler-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:04.104972 2064308 pod_ready.go:82] duration metric: took 339.581954ms for pod "kube-scheduler-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:04.104999 2064308 pod_ready.go:79] waiting up to 6m0s for pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:04.295860 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:04.598910 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:04.602815 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:04.795954 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:05.100224 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:05.101534 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:05.296166 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:05.599439 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:05.601426 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:05.795442 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:06.102393 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:06.103036 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:06.122130 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:06.299045 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:06.599932 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:06.601206 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:06.814263 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:07.096848 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:07.101223 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:07.295217 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:07.599444 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:07.600431 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:07.795082 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:08.101892 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:08.102976 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:08.296014 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:08.598395 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:08.598643 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:08.613020 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:08.795739 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:09.103941 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:09.104967 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:09.295694 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:09.599659 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:09.601180 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:09.796354 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:10.098446 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:10.099577 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:10.295198 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:10.597281 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:10.599286 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:10.616287 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:10.795720 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:11.097801 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:11.099342 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:11.297048 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:11.599247 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:11.599974 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:11.794513 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:12.097432 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:12.099058 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:12.295097 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:12.598578 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:12.599897 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:12.796822 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:13.096898 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:13.098940 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:13.112547 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:13.295802 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:13.599642 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:13.600761 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:13.794583 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:14.096452 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:14.098429 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:14.297517 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:14.598010 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:14.599983 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:14.795140 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:15.104125 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:15.104975 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:15.113778 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:15.295679 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:15.598018 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:15.598555 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:15.795791 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:16.096811 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:16.099236 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:16.296057 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:16.597945 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:16.599646 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:16.797262 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:17.098985 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:17.099689 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:17.295469 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:17.599269 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:17.600683 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:17.611951 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:17.794427 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:18.099862 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:18.101710 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:18.296191 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:18.596772 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:18.600049 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:18.811403 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:19.098130 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:19.099143 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:19.111509 2064308 pod_ready.go:93] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:19.111570 2064308 pod_ready.go:82] duration metric: took 15.006549742s for pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:19.111587 2064308 pod_ready.go:39] duration metric: took 25.927112572s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:31:19.111604 2064308 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:31:19.111670 2064308 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:31:19.162518 2064308 api_server.go:72] duration metric: took 28.954985289s to wait for apiserver process to appear ...
	I0916 10:31:19.162546 2064308 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:31:19.162572 2064308 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:31:19.179642 2064308 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:31:19.180628 2064308 api_server.go:141] control plane version: v1.31.1
	I0916 10:31:19.180658 2064308 api_server.go:131] duration metric: took 18.103285ms to wait for apiserver health ...
	I0916 10:31:19.180668 2064308 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:31:19.201200 2064308 system_pods.go:59] 18 kube-system pods found
	I0916 10:31:19.201280 2064308 system_pods.go:61] "coredns-7c65d6cfc9-jqthn" [bc44b698-a863-4ab8-85d8-5bd54d141bd3] Running
	I0916 10:31:19.201299 2064308 system_pods.go:61] "csi-hostpath-attacher-0" [b2e317f7-5d09-4cd6-823d-cc849d3bf9e2] Running
	I0916 10:31:19.201316 2064308 system_pods.go:61] "csi-hostpath-resizer-0" [1d31c631-4247-4ffe-8a87-9b11803bc389] Running
	I0916 10:31:19.201350 2064308 system_pods.go:61] "csi-hostpathplugin-r28vj" [ad580f65-4a4a-445c-bec7-b518245397ea] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0916 10:31:19.201372 2064308 system_pods.go:61] "etcd-addons-451841" [1a9d168e-83c3-4fc0-b188-1da688352b51] Running
	I0916 10:31:19.201392 2064308 system_pods.go:61] "kindnet-zckxr" [b831bf61-6a35-4681-afe4-f5a0f875a7b5] Running
	I0916 10:31:19.201409 2064308 system_pods.go:61] "kube-apiserver-addons-451841" [db38aff8-23ff-466f-a1b0-ab5f2041183c] Running
	I0916 10:31:19.201425 2064308 system_pods.go:61] "kube-controller-manager-addons-451841" [53d22d78-137d-4306-b8df-d9a55061c9df] Running
	I0916 10:31:19.201458 2064308 system_pods.go:61] "kube-ingress-dns-minikube" [c77fac29-39b0-4e39-94f6-a72ac395b130] Running
	I0916 10:31:19.201483 2064308 system_pods.go:61] "kube-proxy-tltkn" [073e7c9c-1ec1-45db-9603-68862d546266] Running
	I0916 10:31:19.201501 2064308 system_pods.go:61] "kube-scheduler-addons-451841" [5b953c55-7b45-4221-89bf-177d1a4efd05] Running
	I0916 10:31:19.201520 2064308 system_pods.go:61] "metrics-server-84c5f94fbc-q47pm" [2252baaa-acbc-43dd-b38e-e8cd59ac7825] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0916 10:31:19.201537 2064308 system_pods.go:61] "nvidia-device-plugin-daemonset-l6r5c" [9645617a-ab29-4c4e-a4ec-4ea217ffb5fb] Running
	I0916 10:31:19.201566 2064308 system_pods.go:61] "registry-66c9cd494c-l957b" [17af08bf-9965-4bec-8d1b-0c4c37167ac1] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
	I0916 10:31:19.201591 2064308 system_pods.go:61] "registry-proxy-9cpxl" [10361d7a-9abb-41e6-88eb-2194d01e1301] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0916 10:31:19.201613 2064308 system_pods.go:61] "snapshot-controller-56fcc65765-6llf9" [7febb7b8-15ce-499d-83f7-b82e03e35902] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.201634 2064308 system_pods.go:61] "snapshot-controller-56fcc65765-qxvll" [0a01a9d7-31e0-4965-baba-078f44b17fac] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.201663 2064308 system_pods.go:61] "storage-provisioner" [6a86a07f-e25d-4ac9-9c07-dc9ae0048083] Running
	I0916 10:31:19.201686 2064308 system_pods.go:74] duration metric: took 21.010389ms to wait for pod list to return data ...
	I0916 10:31:19.201707 2064308 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:31:19.204845 2064308 default_sa.go:45] found service account: "default"
	I0916 10:31:19.204868 2064308 default_sa.go:55] duration metric: took 3.144001ms for default service account to be created ...
	I0916 10:31:19.204877 2064308 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:31:19.219489 2064308 system_pods.go:86] 18 kube-system pods found
	I0916 10:31:19.219563 2064308 system_pods.go:89] "coredns-7c65d6cfc9-jqthn" [bc44b698-a863-4ab8-85d8-5bd54d141bd3] Running
	I0916 10:31:19.219586 2064308 system_pods.go:89] "csi-hostpath-attacher-0" [b2e317f7-5d09-4cd6-823d-cc849d3bf9e2] Running
	I0916 10:31:19.219605 2064308 system_pods.go:89] "csi-hostpath-resizer-0" [1d31c631-4247-4ffe-8a87-9b11803bc389] Running
	I0916 10:31:19.219640 2064308 system_pods.go:89] "csi-hostpathplugin-r28vj" [ad580f65-4a4a-445c-bec7-b518245397ea] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0916 10:31:19.219669 2064308 system_pods.go:89] "etcd-addons-451841" [1a9d168e-83c3-4fc0-b188-1da688352b51] Running
	I0916 10:31:19.219690 2064308 system_pods.go:89] "kindnet-zckxr" [b831bf61-6a35-4681-afe4-f5a0f875a7b5] Running
	I0916 10:31:19.219710 2064308 system_pods.go:89] "kube-apiserver-addons-451841" [db38aff8-23ff-466f-a1b0-ab5f2041183c] Running
	I0916 10:31:19.219728 2064308 system_pods.go:89] "kube-controller-manager-addons-451841" [53d22d78-137d-4306-b8df-d9a55061c9df] Running
	I0916 10:31:19.219766 2064308 system_pods.go:89] "kube-ingress-dns-minikube" [c77fac29-39b0-4e39-94f6-a72ac395b130] Running
	I0916 10:31:19.219784 2064308 system_pods.go:89] "kube-proxy-tltkn" [073e7c9c-1ec1-45db-9603-68862d546266] Running
	I0916 10:31:19.219799 2064308 system_pods.go:89] "kube-scheduler-addons-451841" [5b953c55-7b45-4221-89bf-177d1a4efd05] Running
	I0916 10:31:19.219819 2064308 system_pods.go:89] "metrics-server-84c5f94fbc-q47pm" [2252baaa-acbc-43dd-b38e-e8cd59ac7825] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0916 10:31:19.219847 2064308 system_pods.go:89] "nvidia-device-plugin-daemonset-l6r5c" [9645617a-ab29-4c4e-a4ec-4ea217ffb5fb] Running
	I0916 10:31:19.219875 2064308 system_pods.go:89] "registry-66c9cd494c-l957b" [17af08bf-9965-4bec-8d1b-0c4c37167ac1] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
	I0916 10:31:19.219896 2064308 system_pods.go:89] "registry-proxy-9cpxl" [10361d7a-9abb-41e6-88eb-2194d01e1301] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0916 10:31:19.219915 2064308 system_pods.go:89] "snapshot-controller-56fcc65765-6llf9" [7febb7b8-15ce-499d-83f7-b82e03e35902] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.219935 2064308 system_pods.go:89] "snapshot-controller-56fcc65765-qxvll" [0a01a9d7-31e0-4965-baba-078f44b17fac] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.219968 2064308 system_pods.go:89] "storage-provisioner" [6a86a07f-e25d-4ac9-9c07-dc9ae0048083] Running
	I0916 10:31:19.219989 2064308 system_pods.go:126] duration metric: took 15.104177ms to wait for k8s-apps to be running ...
	I0916 10:31:19.220008 2064308 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:31:19.220090 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:31:19.263162 2064308 system_svc.go:56] duration metric: took 43.144676ms WaitForService to wait for kubelet
	I0916 10:31:19.263243 2064308 kubeadm.go:582] duration metric: took 29.055714708s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:31:19.263279 2064308 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:31:19.272478 2064308 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:31:19.272561 2064308 node_conditions.go:123] node cpu capacity is 2
	I0916 10:31:19.272591 2064308 node_conditions.go:105] duration metric: took 9.29091ms to run NodePressure ...
	I0916 10:31:19.272616 2064308 start.go:241] waiting for startup goroutines ...
	I0916 10:31:19.305039 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:19.605207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:19.605801 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:19.797193 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:20.099691 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:20.101048 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:20.295291 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:20.597682 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:20.598569 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:20.797887 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:21.096766 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:21.099258 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:21.294755 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:21.597973 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:21.600238 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:21.803444 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:22.097870 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:22.100851 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:22.295006 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:22.597700 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:22.598742 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:22.795839 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:23.096175 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:23.098155 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:23.294814 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:23.596166 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:23.598634 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:23.795172 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:24.096643 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:24.099715 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:24.297255 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:24.598721 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:24.599933 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:24.795260 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:25.098369 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:25.101032 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:25.295093 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:25.597734 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:25.597966 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:25.795323 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:26.096041 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:26.099677 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:26.295063 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:26.597593 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:26.599159 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:26.795825 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:27.098811 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:27.099453 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:27.295012 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:27.597182 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:27.601645 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:27.795056 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:28.128064 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:28.129640 2064308 kapi.go:107] duration metric: took 27.037023988s to wait for kubernetes.io/minikube-addons=registry ...
	I0916 10:31:28.325425 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:28.598623 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:28.795615 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:29.104511 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:29.295646 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:29.598962 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:29.795067 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:30.099851 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:30.296647 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:30.598332 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:30.796058 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:31.099992 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:31.294874 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:31.598117 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:31.796531 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:32.098393 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:32.295287 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:32.598055 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:32.795217 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:33.099311 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:33.295339 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:33.598188 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:33.795029 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:34.098345 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:34.295712 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:34.598442 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:34.795386 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:35.098874 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:35.295415 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:35.598136 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:35.795586 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:36.098658 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:36.294379 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:36.598764 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:36.795529 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:37.098523 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:37.296711 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:37.601252 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:37.799472 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:38.100971 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:38.298686 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:38.599535 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:38.795481 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:39.098734 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:39.296827 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:39.611876 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:39.851830 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:40.108718 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:40.295843 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:40.599050 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:40.795575 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:41.098568 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:41.295957 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:41.598039 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:41.796038 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:42.099484 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:42.295707 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:42.598887 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:42.795416 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:43.099107 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:43.295766 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:43.599999 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:43.795242 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:44.098395 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:44.295957 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:44.600054 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:44.794470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:45.100863 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:45.295685 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:45.599065 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:45.798514 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:46.099116 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:46.296057 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:46.599389 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:46.796585 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:47.099083 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:47.296145 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:47.598490 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:47.797079 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:48.100448 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:48.295294 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:48.598227 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:48.794662 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:49.119185 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:49.295351 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:49.598797 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:49.794962 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:50.098374 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:50.295501 2064308 kapi.go:107] duration metric: took 48.505612662s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
	I0916 10:31:50.598550 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:51.098277 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:51.598976 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:52.098206 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:52.597960 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:53.098585 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:53.598884 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:54.098582 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:54.598852 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:55.098478 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:55.598212 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:56.098517 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:56.598412 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:57.098499 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:57.598710 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:58.097637 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:58.599134 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:59.098778 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:59.598318 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:00.130067 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:00.598955 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:01.098901 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:01.598465 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:02.098925 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:02.598148 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:03.102570 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:03.598295 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:04.099028 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:04.598994 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:05.100186 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:05.598454 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:06.098931 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:06.598336 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:07.098800 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:07.599302 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:08.098401 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:08.599190 2064308 kapi.go:107] duration metric: took 1m7.505513413s to wait for app.kubernetes.io/name=ingress-nginx ...
	I0916 10:32:25.388811 2064308 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:32:25.388836 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:25.888825 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:26.388022 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:26.887847 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:27.387834 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:27.888795 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:28.387767 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:28.887542 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:29.388486 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:29.888784 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:30.387676 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:30.888490 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:31.388236 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:31.888242 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:32.387732 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:32.888206 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:33.387868 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:33.887962 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:34.387683 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:34.889279 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:35.388145 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:35.887555 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:36.389045 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:36.887848 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:37.388742 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:37.888016 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:38.388211 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:38.887716 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:39.388708 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:39.888575 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:40.388841 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:40.888385 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:41.388668 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:41.887792 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:42.388021 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:42.888125 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:43.388320 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:43.887796 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:44.388101 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:44.888791 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:45.391207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:45.888190 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:46.387869 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:46.887554 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:47.388470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:47.888177 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:48.387903 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:48.888232 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:49.388449 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:49.888527 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:50.388650 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:50.888495 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:51.388590 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:51.888197 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:52.387563 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:52.888238 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:53.388322 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:53.887557 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:54.388664 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:54.888836 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:55.388171 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:55.888180 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:56.388322 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:56.888567 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:57.388117 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:57.887422 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:58.388230 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:58.887872 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:59.396878 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:59.888550 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:00.394252 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:00.887612 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:01.392523 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:01.888091 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:02.393207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:02.887610 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:03.388745 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:03.888344 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:04.388999 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:04.889012 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:05.390448 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:05.888198 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:06.395413 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:06.889275 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:07.387879 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:07.888183 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:08.388311 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:08.888612 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:09.388334 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:09.887931 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:10.387765 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:10.888317 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:11.388557 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:11.887439 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:12.388213 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:12.887810 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:13.388135 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:13.888239 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:14.388445 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:14.889102 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:15.388533 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:15.887383 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:16.388426 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:16.888022 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:17.388399 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:17.887327 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:18.388016 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:18.887470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:19.388533 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:19.889124 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:20.387631 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:20.888484 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:21.388124 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:21.887946 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:22.388268 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:22.887332 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:23.388224 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:23.887844 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:24.387744 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:24.888405 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:25.388231 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:25.888672 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:26.388063 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:26.888126 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:27.387865 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:27.887552 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:28.387806 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:28.887772 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:29.388587 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:29.888551 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:30.387903 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:30.887507 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:31.388609 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:31.888449 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:32.388259 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:32.887834 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:33.388141 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:33.888934 2064308 kapi.go:107] duration metric: took 2m30.504634261s to wait for kubernetes.io/minikube-addons=gcp-auth ...
	I0916 10:33:33.890859 2064308 out.go:177] * Your GCP credentials will now be mounted into every pod created in the addons-451841 cluster.
	I0916 10:33:33.892432 2064308 out.go:177] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
	I0916 10:33:33.893920 2064308 out.go:177] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
	I0916 10:33:33.895584 2064308 out.go:177] * Enabled addons: nvidia-device-plugin, storage-provisioner, ingress-dns, storage-provisioner-rancher, volcano, cloud-spanner, metrics-server, inspektor-gadget, yakd, default-storageclass, volumesnapshots, registry, csi-hostpath-driver, ingress, gcp-auth
	I0916 10:33:33.897279 2064308 addons.go:510] duration metric: took 2m43.689318504s for enable addons: enabled=[nvidia-device-plugin storage-provisioner ingress-dns storage-provisioner-rancher volcano cloud-spanner metrics-server inspektor-gadget yakd default-storageclass volumesnapshots registry csi-hostpath-driver ingress gcp-auth]
	I0916 10:33:33.897342 2064308 start.go:246] waiting for cluster config update ...
	I0916 10:33:33.897367 2064308 start.go:255] writing updated cluster config ...
	I0916 10:33:33.898186 2064308 ssh_runner.go:195] Run: rm -f paused
	I0916 10:33:33.906793 2064308 out.go:177] * Done! kubectl is now configured to use "addons-451841" cluster and "default" namespace by default
	E0916 10:33:33.908425 2064308 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                                     ATTEMPT             POD ID              POD
	f90c8869604c5       6ef582f3ec844       11 minutes ago      Running             gcp-auth                                 0                   9038b6b53facd       gcp-auth-89d5ffd79-pw58v
	6f68aecec6aa2       8b46b1cd48760       13 minutes ago      Running             admission                                0                   e3af2951f3794       volcano-admission-77d7d48b68-sjxcs
	dd63136d8d6ac       289a818c8d9c5       13 minutes ago      Running             controller                               0                   b4699f942aa64       ingress-nginx-controller-bc57996ff-rqhcp
	a490639f0e8aa       ee6d597e62dc8       13 minutes ago      Running             csi-snapshotter                          0                   8260f57befdda       csi-hostpathplugin-r28vj
	317ce7462f733       642ded511e141       13 minutes ago      Running             csi-provisioner                          0                   8260f57befdda       csi-hostpathplugin-r28vj
	324d1501e94dc       420193b27261a       13 minutes ago      Exited              patch                                    2                   13d0568de4b9d       ingress-nginx-admission-patch-z2qc7
	7b9cc7b5195ab       922312104da8a       13 minutes ago      Running             liveness-probe                           0                   8260f57befdda       csi-hostpathplugin-r28vj
	bee97d004dc4d       08f6b2990811a       13 minutes ago      Running             hostpath                                 0                   8260f57befdda       csi-hostpathplugin-r28vj
	19ec6d5fbc0fd       8b46b1cd48760       13 minutes ago      Exited              main                                     0                   009b63594d8b2       volcano-admission-init-bz266
	000463bf50714       d9c7ad4c226bf       13 minutes ago      Running             volcano-scheduler                        0                   d9214c0e709d4       volcano-scheduler-576bc46687-xwjbn
	4dc42ec686d73       1505f556b3a7b       13 minutes ago      Running             volcano-controllers                      0                   343744c6dcf07       volcano-controllers-56675bb4d5-2ltwp
	30f0f6b13e6d5       420193b27261a       13 minutes ago      Exited              create                                   0                   993d0544f3868       ingress-nginx-admission-create-4vr4g
	32df8554c702e       4d1e5c3e97420       13 minutes ago      Running             volume-snapshot-controller               0                   864cff1eb40c5       snapshot-controller-56fcc65765-6llf9
	7558a63005c7b       0107d56dbc0be       13 minutes ago      Running             node-driver-registrar                    0                   8260f57befdda       csi-hostpathplugin-r28vj
	d181a00ffae8d       4d1e5c3e97420       13 minutes ago      Running             volume-snapshot-controller               0                   5f5c44341cf11       snapshot-controller-56fcc65765-qxvll
	6b271689ecd4e       7ce2150c8929b       13 minutes ago      Running             local-path-provisioner                   0                   de8add92893e8       local-path-provisioner-86d989889c-qkpm6
	98b48c685a09e       487fa743e1e22       14 minutes ago      Running             csi-resizer                              0                   89cb8ade3231b       csi-hostpath-resizer-0
	2472144c5bc6d       1461903ec4fe9       14 minutes ago      Running             csi-external-health-monitor-controller   0                   8260f57befdda       csi-hostpathplugin-r28vj
	0af6491cd95ee       9a80d518f102c       14 minutes ago      Running             csi-attacher                             0                   0d39436266817       csi-hostpath-attacher-0
	9b811a5c5e80c       35508c2f890c4       14 minutes ago      Running             minikube-ingress-dns                     0                   e1ed027bac8d8       kube-ingress-dns-minikube
	5232ad6b096cb       2f6c962e7b831       14 minutes ago      Running             coredns                                  0                   3ad39eb105298       coredns-7c65d6cfc9-jqthn
	4ddb5fa614111       ba04bb24b9575       14 minutes ago      Running             storage-provisioner                      0                   d0cffc65c18c1       storage-provisioner
	64b671b165f6f       6a23fa8fd2b78       14 minutes ago      Running             kindnet-cni                              0                   bd9ef3e1818e4       kindnet-zckxr
	35987f39fe9ef       24a140c548c07       14 minutes ago      Running             kube-proxy                               0                   6a8ebbdde94be       kube-proxy-tltkn
	8769c148a0bb3       27e3830e14027       14 minutes ago      Running             etcd                                     0                   290d52892953c       etcd-addons-451841
	31da3c8e5867c       279f381cb3736       14 minutes ago      Running             kube-controller-manager                  0                   349d5195292e8       kube-controller-manager-addons-451841
	808425f96a229       7f8aa378bb47d       14 minutes ago      Running             kube-scheduler                           0                   50415da17c7f0       kube-scheduler-addons-451841
	2870b9699fd97       d3f53a98c0a9d       14 minutes ago      Running             kube-apiserver                           0                   1d8868dd2cf0d       kube-apiserver-addons-451841
	
	
	==> containerd <==
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.895364751Z" level=info msg="StopPodSandbox for \"11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257\""
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.895453776Z" level=info msg="Container to stop \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\" must be in running or unknown state, current state \"CONTAINER_EXITED\""
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.959130956Z" level=info msg="shim disconnected" id=11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257 namespace=k8s.io
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.959197991Z" level=warning msg="cleaning up after shim disconnected" id=11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257 namespace=k8s.io
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.959209019Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.979468540Z" level=info msg="TearDown network for sandbox \"11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257\" successfully"
	Sep 16 10:44:51 addons-451841 containerd[816]: time="2024-09-16T10:44:51.979721749Z" level=info msg="StopPodSandbox for \"11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257\" returns successfully"
	Sep 16 10:44:52 addons-451841 containerd[816]: time="2024-09-16T10:44:52.421531479Z" level=info msg="RemoveContainer for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\""
	Sep 16 10:44:52 addons-451841 containerd[816]: time="2024-09-16T10:44:52.429315487Z" level=info msg="RemoveContainer for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\" returns successfully"
	Sep 16 10:45:22 addons-451841 containerd[816]: time="2024-09-16T10:45:22.147035421Z" level=info msg="StopContainer for \"f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e\" with timeout 30 (s)"
	Sep 16 10:45:22 addons-451841 containerd[816]: time="2024-09-16T10:45:22.147485847Z" level=info msg="Stop container \"f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e\" with signal terminated"
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.260702924Z" level=info msg="shim disconnected" id=f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e namespace=k8s.io
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.261317772Z" level=warning msg="cleaning up after shim disconnected" id=f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e namespace=k8s.io
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.261414526Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.280622805Z" level=info msg="StopContainer for \"f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e\" returns successfully"
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.284316952Z" level=info msg="StopPodSandbox for \"f49e908ac996970cd6f49765843d30b6bb4da5d8624af65a9ba03a2f0e0dcb3e\""
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.284387310Z" level=info msg="Container to stop \"f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e\" must be in running or unknown state, current state \"CONTAINER_EXITED\""
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.336981329Z" level=info msg="shim disconnected" id=f49e908ac996970cd6f49765843d30b6bb4da5d8624af65a9ba03a2f0e0dcb3e namespace=k8s.io
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.337044820Z" level=warning msg="cleaning up after shim disconnected" id=f49e908ac996970cd6f49765843d30b6bb4da5d8624af65a9ba03a2f0e0dcb3e namespace=k8s.io
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.337062871Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.390824956Z" level=info msg="TearDown network for sandbox \"f49e908ac996970cd6f49765843d30b6bb4da5d8624af65a9ba03a2f0e0dcb3e\" successfully"
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.390869616Z" level=info msg="StopPodSandbox for \"f49e908ac996970cd6f49765843d30b6bb4da5d8624af65a9ba03a2f0e0dcb3e\" returns successfully"
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.509074423Z" level=info msg="RemoveContainer for \"f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e\""
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.516234476Z" level=info msg="RemoveContainer for \"f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e\" returns successfully"
	Sep 16 10:45:23 addons-451841 containerd[816]: time="2024-09-16T10:45:23.517058635Z" level=error msg="ContainerStatus for \"f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e\" failed" error="rpc error: code = NotFound desc = an error occurred when try to find container \"f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e\": not found"
	
	
	==> coredns [5232ad6b096cb39cf18a9c11e936d3dae11b081bd6666741f3c42e78161ed09f] <==
	[INFO] 10.244.0.9:45725 - 37874 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000066576s
	[INFO] 10.244.0.9:59523 - 16440 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002202223s
	[INFO] 10.244.0.9:59523 - 22330 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002459412s
	[INFO] 10.244.0.9:50469 - 36811 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000147232s
	[INFO] 10.244.0.9:50469 - 6599 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000154395s
	[INFO] 10.244.0.9:54670 - 20364 "AAAA IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000113739s
	[INFO] 10.244.0.9:54670 - 51376 "A IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000392596s
	[INFO] 10.244.0.9:37135 - 16205 "A IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000064189s
	[INFO] 10.244.0.9:37135 - 64832 "AAAA IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.00005714s
	[INFO] 10.244.0.9:54223 - 7962 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000052168s
	[INFO] 10.244.0.9:54223 - 14360 "AAAA IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000048632s
	[INFO] 10.244.0.9:33840 - 38805 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001404552s
	[INFO] 10.244.0.9:33840 - 4752 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.00164702s
	[INFO] 10.244.0.9:45027 - 58736 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000075766s
	[INFO] 10.244.0.9:45027 - 39026 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000213093s
	[INFO] 10.244.0.24:51483 - 10090 "AAAA IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.000202231s
	[INFO] 10.244.0.24:42195 - 64926 "A IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.000154649s
	[INFO] 10.244.0.24:32892 - 59527 "AAAA IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000163215s
	[INFO] 10.244.0.24:47611 - 11902 "A IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000096418s
	[INFO] 10.244.0.24:59950 - 37722 "AAAA IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.00008466s
	[INFO] 10.244.0.24:52002 - 29131 "A IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.000089403s
	[INFO] 10.244.0.24:38598 - 65011 "AAAA IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.002266241s
	[INFO] 10.244.0.24:60458 - 11928 "A IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.001809759s
	[INFO] 10.244.0.24:43975 - 30277 "A IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 610 0.001862182s
	[INFO] 10.244.0.24:51154 - 58482 "AAAA IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 240 0.002138832s
	
	
	==> describe nodes <==
	Name:               addons-451841
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=addons-451841
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=addons-451841
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_30_46_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	                    topology.hostpath.csi/node=addons-451841
	Annotations:        csi.volume.kubernetes.io/nodeid: {"hostpath.csi.k8s.io":"addons-451841"}
	                    kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:30:42 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  addons-451841
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:45:23 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:44:22 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:44:22 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:44:22 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:44:22 +0000   Mon, 16 Sep 2024 10:30:42 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    addons-451841
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 003b87cb77e5465aa882d8df5f5cd5ab
	  System UUID:                21a29522-aef6-4d70-a29b-0ea27731fdbe
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (20 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  gcp-auth                    gcp-auth-89d5ffd79-pw58v                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         12m
	  ingress-nginx               ingress-nginx-controller-bc57996ff-rqhcp    100m (5%)     0 (0%)      90Mi (1%)        0 (0%)         14m
	  kube-system                 coredns-7c65d6cfc9-jqthn                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     14m
	  kube-system                 csi-hostpath-attacher-0                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 csi-hostpath-resizer-0                      0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 csi-hostpathplugin-r28vj                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 etcd-addons-451841                          100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         14m
	  kube-system                 kindnet-zckxr                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      14m
	  kube-system                 kube-apiserver-addons-451841                250m (12%)    0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 kube-controller-manager-addons-451841       200m (10%)    0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 kube-ingress-dns-minikube                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 kube-proxy-tltkn                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 kube-scheduler-addons-451841                100m (5%)     0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 snapshot-controller-56fcc65765-6llf9        0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 snapshot-controller-56fcc65765-qxvll        0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  local-path-storage          local-path-provisioner-86d989889c-qkpm6     0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  volcano-system              volcano-admission-77d7d48b68-sjxcs          0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  volcano-system              volcano-controllers-56675bb4d5-2ltwp        0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	  volcano-system              volcano-scheduler-576bc46687-xwjbn          0 (0%)        0 (0%)      0 (0%)           0 (0%)         14m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                950m (47%)  100m (5%)
	  memory             310Mi (3%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   Starting                 14m                kube-proxy       
	  Normal   NodeAllocatableEnforced  14m                kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 14m                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  14m (x8 over 14m)  kubelet          Node addons-451841 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    14m (x7 over 14m)  kubelet          Node addons-451841 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     14m (x7 over 14m)  kubelet          Node addons-451841 status is now: NodeHasSufficientPID
	  Normal   Starting                 14m                kubelet          Starting kubelet.
	  Normal   Starting                 14m                kubelet          Starting kubelet.
	  Warning  CgroupV1                 14m                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  14m                kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  14m                kubelet          Node addons-451841 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    14m                kubelet          Node addons-451841 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     14m                kubelet          Node addons-451841 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           14m                node-controller  Node addons-451841 event: Registered Node addons-451841 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [8769c148a0bb341cc1dcca117d41b6be795d52ed6e49348d14da26aac1d42f01] <==
	{"level":"info","ts":"2024-09-16T10:30:38.620740Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:30:38.620758Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:30:39.574728Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.574952Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.575045Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.575117Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575193Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575240Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575326Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.581756Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:addons-451841 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:30:39.582006Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:30:39.582134Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:30:39.582418Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:30:39.582525Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:30:39.582434Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.583536Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:30:39.584617Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:30:39.585041Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.590779Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.590980Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.591021Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:30:39.592081Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:40:40.037545Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":1731}
	{"level":"info","ts":"2024-09-16T10:40:40.103709Z","caller":"mvcc/kvstore_compaction.go:69","msg":"finished scheduled compaction","compact-revision":1731,"took":"65.214596ms","hash":4239490720,"current-db-size-bytes":9076736,"current-db-size":"9.1 MB","current-db-size-in-use-bytes":5177344,"current-db-size-in-use":"5.2 MB"}
	{"level":"info","ts":"2024-09-16T10:40:40.103850Z","caller":"mvcc/hash.go:137","msg":"storing new hash","hash":4239490720,"revision":1731,"compact-revision":-1}
	
	
	==> gcp-auth [f90c8869604c54edfd93d5ef8e6467ed81e6a63fbedf9c5712f155d5d85f40b8] <==
	2024/09/16 10:33:32 GCP Auth Webhook started!
	2024/09/16 10:38:59 Ready to marshal response ...
	2024/09/16 10:38:59 Ready to write response ...
	2024/09/16 10:38:59 Ready to marshal response ...
	2024/09/16 10:38:59 Ready to write response ...
	2024/09/16 10:38:59 Ready to marshal response ...
	2024/09/16 10:38:59 Ready to write response ...
	
	
	==> kernel <==
	 10:45:23 up 1 day, 14:27,  0 users,  load average: 0.50, 0.43, 1.19
	Linux addons-451841 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [64b671b165f6f7bb28b281ddd3fe708221407f35f09389c964253f52887fd626] <==
	I0916 10:43:21.724489       1 main.go:299] handling current node
	I0916 10:43:31.724543       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:31.724584       1 main.go:299] handling current node
	I0916 10:43:41.727823       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:41.727859       1 main.go:299] handling current node
	I0916 10:43:51.720900       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:51.720937       1 main.go:299] handling current node
	I0916 10:44:01.728967       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:01.729002       1 main.go:299] handling current node
	I0916 10:44:11.726855       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:11.726894       1 main.go:299] handling current node
	I0916 10:44:21.723656       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:21.723690       1 main.go:299] handling current node
	I0916 10:44:31.726799       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:31.726835       1 main.go:299] handling current node
	I0916 10:44:41.721111       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:41.721149       1 main.go:299] handling current node
	I0916 10:44:51.721753       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:51.721790       1 main.go:299] handling current node
	I0916 10:45:01.721698       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:45:01.721734       1 main.go:299] handling current node
	I0916 10:45:11.724396       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:45:11.724458       1 main.go:299] handling current node
	I0916 10:45:21.722748       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:45:21.722780       1 main.go:299] handling current node
	
	
	==> kube-apiserver [2870b9699fd97d290c5750a6361bd1eb6ac986ce8fb7e3f9eb6474155c6b1fa8] <==
	W0916 10:32:04.547641       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:05.602664       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.287206       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:06.287255       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:32:06.288974       1 dispatcher.go:225] Failed calling webhook, failing closed mutatepod.volcano.sh: failed calling webhook "mutatepod.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/pods/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.354223       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:06.354265       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:32:06.355906       1 dispatcher.go:225] Failed calling webhook, failing closed mutatepod.volcano.sh: failed calling webhook "mutatepod.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/pods/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.623074       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:07.661520       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:08.760490       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:09.841622       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:10.917089       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:11.983956       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:13.046405       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:14.089526       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:25.289607       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:25.289652       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:33:06.298609       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:33:06.298665       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:33:06.363205       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:33:06.363253       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	I0916 10:38:59.795161       1 alloc.go:330] "allocated clusterIPs" service="headlamp/headlamp" clusterIPs={"IPv4":"10.111.56.3"}
	I0916 10:44:51.852485       1 handler.go:286] Adding GroupVersion gadget.kinvolk.io v1alpha1 to ResourceManager
	W0916 10:44:52.904959       1 cacher.go:171] Terminating all watchers from cacher traces.gadget.kinvolk.io
	
	
	==> kube-controller-manager [31da3c8e5867c3e2a6f4592fba3d201359a6c0c862a2620157496149c91a3b11] <==
	I0916 10:38:59.926177       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="14.113318ms"
	I0916 10:38:59.926285       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="67.922µs"
	I0916 10:38:59.926662       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="38.744µs"
	I0916 10:39:03.454274       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="54.203µs"
	I0916 10:39:03.484504       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="10.723572ms"
	I0916 10:39:03.484593       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="51.142µs"
	I0916 10:39:10.433645       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="6.515µs"
	I0916 10:39:16.650393       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-451841"
	I0916 10:39:20.588127       1 namespace_controller.go:187] "Namespace has been deleted" logger="namespace-controller" namespace="headlamp"
	I0916 10:44:22.473587       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-451841"
	E0916 10:44:52.906619       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError"
	W0916 10:44:54.075635       1 reflector.go:561] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0916 10:44:54.075684       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError"
	W0916 10:44:55.746627       1 reflector.go:561] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0916 10:44:55.746672       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError"
	W0916 10:44:59.606177       1 reflector.go:561] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0916 10:44:59.606221       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError"
	I0916 10:45:02.023798       1 namespace_controller.go:187] "Namespace has been deleted" logger="namespace-controller" namespace="gadget"
	W0916 10:45:09.336666       1 reflector.go:561] k8s.io/client-go/metadata/metadatainformer/informer.go:138: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
	E0916 10:45:09.336711       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/metadata/metadatainformer/informer.go:138: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource" logger="UnhandledError"
	I0916 10:45:19.668377       1 shared_informer.go:313] Waiting for caches to sync for resource quota
	I0916 10:45:19.668476       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:45:19.781135       1 shared_informer.go:313] Waiting for caches to sync for garbage collector
	I0916 10:45:19.781199       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:45:22.126216       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/metrics-server-84c5f94fbc" duration="7.5µs"
	
	
	==> kube-proxy [35987f39fe9efffcbcdfe8a1694d2541bd561939f35f2770e06a09f005dcf753] <==
	I0916 10:30:51.148935       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:30:51.266541       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:30:51.266602       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:30:51.307434       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:30:51.307506       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:30:51.310004       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:30:51.310401       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:30:51.310420       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:30:51.312344       1 config.go:199] "Starting service config controller"
	I0916 10:30:51.312371       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:30:51.312398       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:30:51.312403       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:30:51.315085       1 config.go:328] "Starting node config controller"
	I0916 10:30:51.315100       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:30:51.413119       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:30:51.413177       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:30:51.415238       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [808425f96a2291f8e0cf3dfea11339a46bc25f8b4e1f82c29efc8eee8e1d729a] <==
	W0916 10:30:43.815016       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:30:43.815095       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.815504       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.815602       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.815794       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 10:30:43.815882       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.816048       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:30:43.816126       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.816295       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.817022       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817307       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:30:43.817404       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817601       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.817688       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817801       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:30:43.818028       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817989       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:30:43.818395       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.818315       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 10:30:43.818847       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.818381       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 10:30:43.819065       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.819318       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:30:43.819478       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	I0916 10:30:44.999991       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117218    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-modules" (OuterVolumeSpecName: "modules") pod "6a752659-ec3c-4841-8e83-fd916caaebc2" (UID: "6a752659-ec3c-4841-8e83-fd916caaebc2"). InnerVolumeSpecName "modules". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117235    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-debugfs" (OuterVolumeSpecName: "debugfs") pod "6a752659-ec3c-4841-8e83-fd916caaebc2" (UID: "6a752659-ec3c-4841-8e83-fd916caaebc2"). InnerVolumeSpecName "debugfs". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117251    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-host" (OuterVolumeSpecName: "host") pod "6a752659-ec3c-4841-8e83-fd916caaebc2" (UID: "6a752659-ec3c-4841-8e83-fd916caaebc2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117267    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-bpffs" (OuterVolumeSpecName: "bpffs") pod "6a752659-ec3c-4841-8e83-fd916caaebc2" (UID: "6a752659-ec3c-4841-8e83-fd916caaebc2"). InnerVolumeSpecName "bpffs". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.117718    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-cgroup" (OuterVolumeSpecName: "cgroup") pod "6a752659-ec3c-4841-8e83-fd916caaebc2" (UID: "6a752659-ec3c-4841-8e83-fd916caaebc2"). InnerVolumeSpecName "cgroup". PluginName "kubernetes.io/host-path", VolumeGidValue ""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.121735    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a752659-ec3c-4841-8e83-fd916caaebc2-kube-api-access-r4bbk" (OuterVolumeSpecName: "kube-api-access-r4bbk") pod "6a752659-ec3c-4841-8e83-fd916caaebc2" (UID: "6a752659-ec3c-4841-8e83-fd916caaebc2"). InnerVolumeSpecName "kube-api-access-r4bbk". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217663    1517 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-r4bbk\" (UniqueName: \"kubernetes.io/projected/6a752659-ec3c-4841-8e83-fd916caaebc2-kube-api-access-r4bbk\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217713    1517 reconciler_common.go:288] "Volume detached for volume \"cgroup\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-cgroup\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217725    1517 reconciler_common.go:288] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-run\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217760    1517 reconciler_common.go:288] "Volume detached for volume \"modules\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-modules\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217774    1517 reconciler_common.go:288] "Volume detached for volume \"debugfs\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-debugfs\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217783    1517 reconciler_common.go:288] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-host\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.217791    1517 reconciler_common.go:288] "Volume detached for volume \"bpffs\" (UniqueName: \"kubernetes.io/host-path/6a752659-ec3c-4841-8e83-fd916caaebc2-bpffs\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:44:52 addons-451841 kubelet[1517]: I0916 10:44:52.420396    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:44:53 addons-451841 kubelet[1517]: I0916 10:44:53.525153    1517 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2" path="/var/lib/kubelet/pods/6a752659-ec3c-4841-8e83-fd916caaebc2/volumes"
	Sep 16 10:45:23 addons-451841 kubelet[1517]: I0916 10:45:23.504104    1517 scope.go:117] "RemoveContainer" containerID="f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e"
	Sep 16 10:45:23 addons-451841 kubelet[1517]: I0916 10:45:23.516531    1517 scope.go:117] "RemoveContainer" containerID="f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e"
	Sep 16 10:45:23 addons-451841 kubelet[1517]: E0916 10:45:23.517290    1517 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = an error occurred when try to find container \"f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e\": not found" containerID="f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e"
	Sep 16 10:45:23 addons-451841 kubelet[1517]: I0916 10:45:23.517360    1517 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"containerd","ID":"f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e"} err="failed to get container status \"f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e\": rpc error: code = NotFound desc = an error occurred when try to find container \"f28ea158892d319280c829588dff08396017242530e7656305e5979c3786538e\": not found"
	Sep 16 10:45:23 addons-451841 kubelet[1517]: I0916 10:45:23.553881    1517 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6whm\" (UniqueName: \"kubernetes.io/projected/2252baaa-acbc-43dd-b38e-e8cd59ac7825-kube-api-access-p6whm\") pod \"2252baaa-acbc-43dd-b38e-e8cd59ac7825\" (UID: \"2252baaa-acbc-43dd-b38e-e8cd59ac7825\") "
	Sep 16 10:45:23 addons-451841 kubelet[1517]: I0916 10:45:23.553939    1517 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmp-dir\" (UniqueName: \"kubernetes.io/empty-dir/2252baaa-acbc-43dd-b38e-e8cd59ac7825-tmp-dir\") pod \"2252baaa-acbc-43dd-b38e-e8cd59ac7825\" (UID: \"2252baaa-acbc-43dd-b38e-e8cd59ac7825\") "
	Sep 16 10:45:23 addons-451841 kubelet[1517]: I0916 10:45:23.554419    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2252baaa-acbc-43dd-b38e-e8cd59ac7825-tmp-dir" (OuterVolumeSpecName: "tmp-dir") pod "2252baaa-acbc-43dd-b38e-e8cd59ac7825" (UID: "2252baaa-acbc-43dd-b38e-e8cd59ac7825"). InnerVolumeSpecName "tmp-dir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
	Sep 16 10:45:23 addons-451841 kubelet[1517]: I0916 10:45:23.559728    1517 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2252baaa-acbc-43dd-b38e-e8cd59ac7825-kube-api-access-p6whm" (OuterVolumeSpecName: "kube-api-access-p6whm") pod "2252baaa-acbc-43dd-b38e-e8cd59ac7825" (UID: "2252baaa-acbc-43dd-b38e-e8cd59ac7825"). InnerVolumeSpecName "kube-api-access-p6whm". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 10:45:23 addons-451841 kubelet[1517]: I0916 10:45:23.655123    1517 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-p6whm\" (UniqueName: \"kubernetes.io/projected/2252baaa-acbc-43dd-b38e-e8cd59ac7825-kube-api-access-p6whm\") on node \"addons-451841\" DevicePath \"\""
	Sep 16 10:45:23 addons-451841 kubelet[1517]: I0916 10:45:23.655165    1517 reconciler_common.go:288] "Volume detached for volume \"tmp-dir\" (UniqueName: \"kubernetes.io/empty-dir/2252baaa-acbc-43dd-b38e-e8cd59ac7825-tmp-dir\") on node \"addons-451841\" DevicePath \"\""
	
	
	==> storage-provisioner [4ddb5fa614111a21d93d580947f3eb3b791d38fa6e497e66ae259ff6bb7fed15] <==
	I0916 10:30:56.265937       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:30:56.289948       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:30:56.290011       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:30:56.319402       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:30:56.319890       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"ef239dcc-ec3a-4a4d-b0db-6d9c8de888a1", APIVersion:"v1", ResourceVersion:"601", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157 became leader
	I0916 10:30:56.319948       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157!
	I0916 10:30:56.520389       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-451841 -n addons-451841
helpers_test.go:261: (dbg) Run:  kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (599.766µs)
helpers_test.go:263: kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestAddons/parallel/MetricsServer (369.13s)

                                                
                                    
x
+
TestAddons/parallel/CSI (362.5s)

                                                
                                                
=== RUN   TestAddons/parallel/CSI
=== PAUSE TestAddons/parallel/CSI

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/CSI
addons_test.go:567: csi-hostpath-driver pods stabilized in 10.221183ms
addons_test.go:570: (dbg) Run:  kubectl --context addons-451841 create -f testdata/csi-hostpath-driver/pvc.yaml
addons_test.go:570: (dbg) Non-zero exit: kubectl --context addons-451841 create -f testdata/csi-hostpath-driver/pvc.yaml: fork/exec /usr/local/bin/kubectl: exec format error (424.801µs)
addons_test.go:572: creating sample PVC with kubectl --context addons-451841 create -f testdata/csi-hostpath-driver/pvc.yaml failed: fork/exec /usr/local/bin/kubectl: exec format error
addons_test.go:575: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pvc "hpvc" in namespace "default" ...
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (316.708µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (655.266µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (622.502µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (657.513µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (557.683µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (504.35µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (430.882µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (482.574µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (454.84µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (469.987µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (496.514µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (599.536µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (599.717µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (400.252µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (602.334µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (495.709µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (743.856µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (375.924µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (392.728µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (386.656µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (466.295µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (600.825µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (329.483µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (428.896µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (543.627µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (352.384µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (495.02µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (346.96µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (13.244663ms)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (481.334µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (328.794µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (356.33µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (482.483µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (699.597µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (509.543µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (548.444µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (387.435µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (493.248µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (534.028µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (567.512µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (470.422µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (460.805µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (548.788µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (529.727µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (483.796µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (505.843µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (515.353µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (318.497µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (571.721µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (632.151µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (551.718µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (497.268µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (471.939µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (551.208µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (302.266µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (459.074µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (557.584µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (490.713µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (326.964µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (697.259µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (606.06µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (411.008µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (504.416µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (517.33µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (350.291µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (507.131µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (506.163µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (528.136µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (551.118µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (372.355µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (509.297µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (357.618µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (474.721µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (415.842µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (428.428µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (513.629µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (325.102µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (562.975µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (446.668µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (406.676µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (531.943µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (384.95µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (388.568µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (519.981µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (666.719µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (361.007µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (333.872µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (504.924µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (449.055µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (431.685µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (13.0909ms)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (474.384µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (487.808µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (542.577µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (350.455µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (507.599µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (491.336µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (480.169µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (456.695µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (505.481µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (534.084µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (409.647µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (574.133µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (543.972µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (453.158µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (406.611µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (411.985µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (415.202µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (857.717µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (482.951µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (522.228µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (407.956µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (514.655µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (569.999µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (509.379µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (331.28µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (592.579µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (519.89µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (569.744µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (413.781µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (604.706µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (341.233µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (711.323µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (591.258µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (1.844238ms)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (527.684µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (560.153µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (389.076µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (442.852µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (446.266µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (454.282µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (494.224µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (499.361µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (466.229µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (432.588µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (351.711µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (489.925µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (356.478µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (499.377µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (435.624µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (341.101µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (506.417µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (514.262µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (518.889µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (344.695µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (519.037µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (538.121µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (629.97µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (510.101µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (595.795µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (433.49µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (375.997µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (388.913µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (572.534µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (594.647µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (531.656µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (518.725µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (605.427µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (511.652µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (689.989µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (476.461µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (537.113µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (718.314µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (924.441µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (400.514µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (466.548µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (529.671µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (365.774µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (629.502µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (460.141µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (558.003µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (616.841µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (479.776µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (424.095µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (477.15µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (322.993µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (543.791µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (560.924µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (433.794µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (397.979µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (598.437µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (626.154µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (522.72µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (399.997µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (415.775µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (597.321µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (536.973µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (731.097µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (537.916µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (554.671µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (468.953µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (354.861µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (540.001µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (363.46µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (563.713µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (523.483µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (495.201µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (476.847µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (346.377µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (372.634µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (399.505µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (477.396µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (353.885µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (490.786µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (511.464µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (486.052µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (402.492µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (602.999µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (644.517µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (392.826µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (542.823µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (719.257µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (489.605µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (510.585µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (526.257µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (12.769812ms)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (495.808µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (360.933µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (379.526µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (497.203µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (417.375µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (481.572µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (555.943µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (377.926µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (485.601µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (459.246µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (478.381µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (610.491µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (498.787µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (527.037µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (810.661µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (465.67µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (351.949µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (510.405µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (461.675µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (617.769µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (586.261µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (557.001µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (594.548µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (582.905µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (400.063µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (495.89µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (733.961µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (496.169µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (941.228µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (5.613584ms)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (547.82µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (542.38µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (518.675µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (456.071µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (449.966µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (535.955µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (472.185µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (503.562µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (606.848µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (353.835µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (522.786µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (373.61µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (367.39µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (528.595µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (502.692µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (363.345µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (357.676µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (493.716µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (477.035µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (513.876µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (389.922µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (597.395µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (428.395µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (459.443µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (545.604µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (583.397µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (429.938µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (803.655µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (435.156µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (398.734µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (489.745µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (551.504µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (550.503µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (519.882µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (558.396µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (547.976µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (434.254µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (543.824µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (448.153µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (422.569µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (626.433µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (544.899µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (492.559µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (539.853µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (502.405µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (632.455µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (536.177µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (498.04µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (539.189µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (522.45µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (501.814µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (627.573µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (507.328µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (552.473µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (574.832µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (566.29µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (667.154µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (587.13µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (418.057µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (404.978µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (518.782µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (509.256µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (354.369µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (551.43µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (357.971µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (500.584µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (602.786µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (580.304µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (562.917µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (427.607µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (380.994µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (372.682µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (374.291µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (552.341µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (377.426µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (633.99µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (530.507µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (473.884µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (482.344µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (554.228µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (422.972µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (460.92µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (540.961µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (373.183µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (472.899µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (362.665µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (371.173µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (544.366µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (434.221µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (440.58µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (395.427µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (405.52µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (630.199µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (436.855µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (530.343µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (546.647µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (445.338µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (395.78µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (393.007µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (486.159µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (520.678µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (468.419µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (361.754µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (511.874µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (479.538µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (561.753µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (439.62µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (492.37µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (500.567µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (392.859µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (557.174µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (398.766µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (401.72µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: fork/exec /usr/local/bin/kubectl: exec format error (585.293µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:394: (dbg) Run:  kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Non-zero exit: kubectl --context addons-451841 get pvc hpvc -o jsonpath={.status.phase} -n default: context deadline exceeded (1.346µs)
helpers_test.go:396: TestAddons/parallel/CSI: WARNING: PVC get for "default" "hpvc" returned: context deadline exceeded
addons_test.go:576: failed waiting for PVC hpvc: context deadline exceeded
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestAddons/parallel/CSI]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect addons-451841
helpers_test.go:235: (dbg) docker inspect addons-451841:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4",
	        "Created": "2024-09-16T10:30:19.386072283Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2064804,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:30:19.514500967Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/hostname",
	        "HostsPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/hosts",
	        "LogPath": "/var/lib/docker/containers/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4/8a213d4c4dec191ac21897aa33219bb7cf8466fe9d00ad64304e7580315ef4c4-json.log",
	        "Name": "/addons-451841",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "addons-451841:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "addons-451841",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/merged",
	                "UpperDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/diff",
	                "WorkDir": "/var/lib/docker/overlay2/4f2aa30cbdba0bfdad123c484f990169b03c429c6e8eeb2b6ce825eb91b7aa2b/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "addons-451841",
	                "Source": "/var/lib/docker/volumes/addons-451841/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "addons-451841",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "addons-451841",
	                "name.minikube.sigs.k8s.io": "addons-451841",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "4da7b9dd4db914ae48304dba9ae2b2fb9dab68040bc986bf2751a778e62e4524",
	            "SandboxKey": "/var/run/docker/netns/4da7b9dd4db9",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40577"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40578"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40581"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40579"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40580"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "addons-451841": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "cd1315f485e3267c82ac80908081e901323e720ef1bb26de92d612c54dfd58d8",
	                    "EndpointID": "36f212e2a713c67d6c2ea54e50fbd0d8d7f7eb862ef913caa03a6cbfac71cb21",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "addons-451841",
	                        "8a213d4c4dec"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p addons-451841 -n addons-451841
helpers_test.go:244: <<< TestAddons/parallel/CSI FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestAddons/parallel/CSI]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p addons-451841 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p addons-451841 logs -n 25: (1.514858871s)
helpers_test.go:252: TestAddons/parallel/CSI logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| Command |                 Args                 |        Profile         |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:28 UTC |                     |
	|         | -p download-only-911311              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0         |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:28 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-911311              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | -o=json --download-only              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | -p download-only-889126              |                        |         |         |                     |                     |
	|         | --force --alsologtostderr            |                        |         |         |                     |                     |
	|         | --kubernetes-version=v1.31.1         |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | --all                                | minikube               | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-889126              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-911311              | download-only-911311   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-889126              | download-only-889126   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | --download-only -p                   | download-docker-956530 | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | download-docker-956530               |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | -p download-docker-956530            | download-docker-956530 | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | --download-only -p                   | binary-mirror-852743   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | binary-mirror-852743                 |                        |         |         |                     |                     |
	|         | --alsologtostderr                    |                        |         |         |                     |                     |
	|         | --binary-mirror                      |                        |         |         |                     |                     |
	|         | http://127.0.0.1:35351               |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	| delete  | -p binary-mirror-852743              | binary-mirror-852743   | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| addons  | disable dashboard -p                 | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| addons  | enable dashboard -p                  | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| start   | -p addons-451841 --wait=true         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:33 UTC |
	|         | --memory=4000 --alsologtostderr      |                        |         |         |                     |                     |
	|         | --addons=registry                    |                        |         |         |                     |                     |
	|         | --addons=metrics-server              |                        |         |         |                     |                     |
	|         | --addons=volumesnapshots             |                        |         |         |                     |                     |
	|         | --addons=csi-hostpath-driver         |                        |         |         |                     |                     |
	|         | --addons=gcp-auth                    |                        |         |         |                     |                     |
	|         | --addons=cloud-spanner               |                        |         |         |                     |                     |
	|         | --addons=inspektor-gadget            |                        |         |         |                     |                     |
	|         | --addons=storage-provisioner-rancher |                        |         |         |                     |                     |
	|         | --addons=nvidia-device-plugin        |                        |         |         |                     |                     |
	|         | --addons=yakd --addons=volcano       |                        |         |         |                     |                     |
	|         | --driver=docker                      |                        |         |         |                     |                     |
	|         | --container-runtime=containerd       |                        |         |         |                     |                     |
	|         | --addons=ingress                     |                        |         |         |                     |                     |
	|         | --addons=ingress-dns                 |                        |         |         |                     |                     |
	| addons  | addons-451841 addons disable         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | yakd --alsologtostderr -v=1          |                        |         |         |                     |                     |
	| ip      | addons-451841 ip                     | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	| addons  | addons-451841 addons disable         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | registry --alsologtostderr           |                        |         |         |                     |                     |
	|         | -v=1                                 |                        |         |         |                     |                     |
	| addons  | disable nvidia-device-plugin         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | -p addons-451841                     |                        |         |         |                     |                     |
	| addons  | disable cloud-spanner -p             | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | addons-451841                        |                        |         |         |                     |                     |
	| addons  | enable headlamp                      | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:38 UTC | 16 Sep 24 10:38 UTC |
	|         | -p addons-451841                     |                        |         |         |                     |                     |
	|         | --alsologtostderr -v=1               |                        |         |         |                     |                     |
	| addons  | addons-451841 addons disable         | addons-451841          | jenkins | v1.34.0 | 16 Sep 24 10:39 UTC | 16 Sep 24 10:39 UTC |
	|         | headlamp --alsologtostderr           |                        |         |         |                     |                     |
	|         | -v=1                                 |                        |         |         |                     |                     |
	|---------|--------------------------------------|------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:29:55
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:29:55.756900 2064308 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:29:55.757118 2064308 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:29:55.757146 2064308 out.go:358] Setting ErrFile to fd 2...
	I0916 10:29:55.757164 2064308 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:29:55.757443 2064308 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:29:55.757918 2064308 out.go:352] Setting JSON to false
	I0916 10:29:55.758950 2064308 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":137538,"bootTime":1726345058,"procs":192,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:29:55.759050 2064308 start.go:139] virtualization:  
	I0916 10:29:55.762450 2064308 out.go:177] * [addons-451841] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:29:55.765218 2064308 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:29:55.765320 2064308 notify.go:220] Checking for updates...
	I0916 10:29:55.771607 2064308 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:29:55.774426 2064308 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:29:55.777761 2064308 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:29:55.780330 2064308 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:29:55.782904 2064308 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:29:55.785688 2064308 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:29:55.807382 2064308 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:29:55.807515 2064308 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:29:55.863178 2064308 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:49 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:29:55.853088898 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:29:55.863303 2064308 docker.go:318] overlay module found
	I0916 10:29:55.867792 2064308 out.go:177] * Using the docker driver based on user configuration
	I0916 10:29:55.870461 2064308 start.go:297] selected driver: docker
	I0916 10:29:55.870476 2064308 start.go:901] validating driver "docker" against <nil>
	I0916 10:29:55.870490 2064308 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:29:55.871367 2064308 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:29:55.922454 2064308 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:49 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:29:55.912678011 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:29:55.922666 2064308 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:29:55.922995 2064308 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:29:55.925501 2064308 out.go:177] * Using Docker driver with root privileges
	I0916 10:29:55.928402 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:29:55.928468 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:29:55.928481 2064308 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:29:55.928561 2064308 start.go:340] cluster config:
	{Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime
:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHA
uthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:29:55.931349 2064308 out.go:177] * Starting "addons-451841" primary control-plane node in "addons-451841" cluster
	I0916 10:29:55.933847 2064308 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:29:55.936549 2064308 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:29:55.939027 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:29:55.939075 2064308 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:29:55.939087 2064308 cache.go:56] Caching tarball of preloaded images
	I0916 10:29:55.939127 2064308 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:29:55.939172 2064308 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:29:55.939183 2064308 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:29:55.939554 2064308 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json ...
	I0916 10:29:55.939585 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json: {Name:mk4b86ccd0e04a15f77246bcc432382e6ef83bd3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:29:55.955829 2064308 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:29:55.955957 2064308 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:29:55.955999 2064308 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:29:55.956009 2064308 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:29:55.956017 2064308 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:29:55.956025 2064308 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:30:13.033213 2064308 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:30:13.033255 2064308 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:30:13.033286 2064308 start.go:360] acquireMachinesLock for addons-451841: {Name:mk3e70771a060125a26a792bbbf3ad5672ad97bd Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:30:13.033421 2064308 start.go:364] duration metric: took 111.614µs to acquireMachinesLock for "addons-451841"
	I0916 10:30:13.033454 2064308 start.go:93] Provisioning new machine with config: &{Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:min
ikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:fa
lse CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:30:13.033622 2064308 start.go:125] createHost starting for "" (driver="docker")
	I0916 10:30:13.035916 2064308 out.go:235] * Creating docker container (CPUs=2, Memory=4000MB) ...
	I0916 10:30:13.036188 2064308 start.go:159] libmachine.API.Create for "addons-451841" (driver="docker")
	I0916 10:30:13.036228 2064308 client.go:168] LocalClient.Create starting
	I0916 10:30:13.036363 2064308 main.go:141] libmachine: Creating CA: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 10:30:13.386329 2064308 main.go:141] libmachine: Creating client certificate: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 10:30:13.561829 2064308 cli_runner.go:164] Run: docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 10:30:13.576129 2064308 cli_runner.go:211] docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 10:30:13.576212 2064308 network_create.go:284] running [docker network inspect addons-451841] to gather additional debugging logs...
	I0916 10:30:13.576235 2064308 cli_runner.go:164] Run: docker network inspect addons-451841
	W0916 10:30:13.591552 2064308 cli_runner.go:211] docker network inspect addons-451841 returned with exit code 1
	I0916 10:30:13.591606 2064308 network_create.go:287] error running [docker network inspect addons-451841]: docker network inspect addons-451841: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network addons-451841 not found
	I0916 10:30:13.591621 2064308 network_create.go:289] output of [docker network inspect addons-451841]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network addons-451841 not found
	
	** /stderr **
	I0916 10:30:13.591720 2064308 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:30:13.608306 2064308 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001aeacb0}
	I0916 10:30:13.608356 2064308 network_create.go:124] attempt to create docker network addons-451841 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0916 10:30:13.608420 2064308 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=addons-451841 addons-451841
	I0916 10:30:13.683378 2064308 network_create.go:108] docker network addons-451841 192.168.49.0/24 created
	I0916 10:30:13.683411 2064308 kic.go:121] calculated static IP "192.168.49.2" for the "addons-451841" container
	I0916 10:30:13.683492 2064308 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:30:13.700184 2064308 cli_runner.go:164] Run: docker volume create addons-451841 --label name.minikube.sigs.k8s.io=addons-451841 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:30:13.718068 2064308 oci.go:103] Successfully created a docker volume addons-451841
	I0916 10:30:13.718179 2064308 cli_runner.go:164] Run: docker run --rm --name addons-451841-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --entrypoint /usr/bin/test -v addons-451841:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:30:15.421383 2064308 cli_runner.go:217] Completed: docker run --rm --name addons-451841-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --entrypoint /usr/bin/test -v addons-451841:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib: (1.703150378s)
	I0916 10:30:15.421417 2064308 oci.go:107] Successfully prepared a docker volume addons-451841
	I0916 10:30:15.421439 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:30:15.421458 2064308 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:30:15.421522 2064308 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-451841:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:30:19.320511 2064308 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v addons-451841:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (3.898937148s)
	I0916 10:30:19.320548 2064308 kic.go:203] duration metric: took 3.899086612s to extract preloaded images to volume ...
	W0916 10:30:19.320695 2064308 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:30:19.320803 2064308 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:30:19.371670 2064308 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname addons-451841 --name addons-451841 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=addons-451841 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=addons-451841 --network addons-451841 --ip 192.168.49.2 --volume addons-451841:/var --security-opt apparmor=unconfined --memory=4000mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:30:19.674459 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Running}}
	I0916 10:30:19.700795 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:19.725169 2064308 cli_runner.go:164] Run: docker exec addons-451841 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:30:19.777409 2064308 oci.go:144] the created container "addons-451841" has a running status.
	I0916 10:30:19.777438 2064308 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa...
	I0916 10:30:20.426549 2064308 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:30:20.459111 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:20.485764 2064308 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:30:20.485788 2064308 kic_runner.go:114] Args: [docker exec --privileged addons-451841 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:30:20.553044 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:20.584488 2064308 machine.go:93] provisionDockerMachine start ...
	I0916 10:30:20.584585 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.604705 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.605002 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.605024 2064308 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:30:20.750295 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-451841
	
	I0916 10:30:20.750323 2064308 ubuntu.go:169] provisioning hostname "addons-451841"
	I0916 10:30:20.750394 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.772671 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.772910 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.772922 2064308 main.go:141] libmachine: About to run SSH command:
	sudo hostname addons-451841 && echo "addons-451841" | sudo tee /etc/hostname
	I0916 10:30:20.923316 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: addons-451841
	
	I0916 10:30:20.923448 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:20.940021 2064308 main.go:141] libmachine: Using SSH client type: native
	I0916 10:30:20.940274 2064308 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40577 <nil> <nil>}
	I0916 10:30:20.940298 2064308 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\saddons-451841' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 addons-451841/g' /etc/hosts;
				else 
					echo '127.0.1.1 addons-451841' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:30:21.087110 2064308 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:30:21.087184 2064308 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:30:21.087263 2064308 ubuntu.go:177] setting up certificates
	I0916 10:30:21.087293 2064308 provision.go:84] configureAuth start
	I0916 10:30:21.087450 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.105254 2064308 provision.go:143] copyHostCerts
	I0916 10:30:21.105342 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:30:21.105468 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:30:21.105537 2064308 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:30:21.105585 2064308 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.addons-451841 san=[127.0.0.1 192.168.49.2 addons-451841 localhost minikube]
	I0916 10:30:21.497343 2064308 provision.go:177] copyRemoteCerts
	I0916 10:30:21.497413 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:30:21.497456 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.514957 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.611658 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:30:21.636890 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:30:21.662172 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:30:21.686808 2064308 provision.go:87] duration metric: took 599.477164ms to configureAuth
	I0916 10:30:21.686873 2064308 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:30:21.687116 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:21.687133 2064308 machine.go:96] duration metric: took 1.102625588s to provisionDockerMachine
	I0916 10:30:21.687141 2064308 client.go:171] duration metric: took 8.650903893s to LocalClient.Create
	I0916 10:30:21.687161 2064308 start.go:167] duration metric: took 8.650974974s to libmachine.API.Create "addons-451841"
	I0916 10:30:21.687171 2064308 start.go:293] postStartSetup for "addons-451841" (driver="docker")
	I0916 10:30:21.687182 2064308 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:30:21.687249 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:30:21.687299 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.706431 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.804065 2064308 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:30:21.807409 2064308 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:30:21.807450 2064308 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:30:21.807462 2064308 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:30:21.807470 2064308 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:30:21.807482 2064308 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:30:21.807551 2064308 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:30:21.807581 2064308 start.go:296] duration metric: took 120.403063ms for postStartSetup
	I0916 10:30:21.807904 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.824820 2064308 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/config.json ...
	I0916 10:30:21.825120 2064308 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:30:21.825171 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.841557 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.935711 2064308 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:30:21.940289 2064308 start.go:128] duration metric: took 8.906649729s to createHost
	I0916 10:30:21.940328 2064308 start.go:83] releasing machines lock for "addons-451841", held for 8.906892895s
	I0916 10:30:21.940401 2064308 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" addons-451841
	I0916 10:30:21.957512 2064308 ssh_runner.go:195] Run: cat /version.json
	I0916 10:30:21.957582 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.957842 2064308 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:30:21.957901 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:21.986070 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:21.992358 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:22.209233 2064308 ssh_runner.go:195] Run: systemctl --version
	I0916 10:30:22.213896 2064308 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:30:22.218111 2064308 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:30:22.243931 2064308 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:30:22.244032 2064308 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:30:22.274074 2064308 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:30:22.274104 2064308 start.go:495] detecting cgroup driver to use...
	I0916 10:30:22.274139 2064308 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:30:22.274194 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:30:22.287113 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:30:22.299302 2064308 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:30:22.299412 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:30:22.313515 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:30:22.327839 2064308 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:30:22.409410 2064308 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:30:22.494962 2064308 docker.go:233] disabling docker service ...
	I0916 10:30:22.495100 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:30:22.515205 2064308 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:30:22.527495 2064308 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:30:22.611444 2064308 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:30:22.705471 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:30:22.717496 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:30:22.735435 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:30:22.746124 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:30:22.757226 2064308 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:30:22.757299 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:30:22.767541 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:30:22.779039 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:30:22.788821 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:30:22.799244 2064308 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:30:22.808704 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:30:22.820713 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:30:22.831851 2064308 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:30:22.842394 2064308 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:30:22.851545 2064308 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:30:22.860424 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:22.961475 2064308 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:30:23.100987 2064308 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:30:23.101138 2064308 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:30:23.105001 2064308 start.go:563] Will wait 60s for crictl version
	I0916 10:30:23.105079 2064308 ssh_runner.go:195] Run: which crictl
	I0916 10:30:23.108696 2064308 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:30:23.154724 2064308 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:30:23.154812 2064308 ssh_runner.go:195] Run: containerd --version
	I0916 10:30:23.179902 2064308 ssh_runner.go:195] Run: containerd --version
	I0916 10:30:23.208730 2064308 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:30:23.210246 2064308 cli_runner.go:164] Run: docker network inspect addons-451841 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:30:23.225302 2064308 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:30:23.229071 2064308 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:30:23.240048 2064308 kubeadm.go:883] updating cluster {Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNa
mes:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false Cus
tomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:30:23.240172 2064308 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:30:23.240246 2064308 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:30:23.276242 2064308 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:30:23.276266 2064308 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:30:23.276331 2064308 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:30:23.312895 2064308 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:30:23.312924 2064308 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:30:23.312933 2064308 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 containerd true true} ...
	I0916 10:30:23.313028 2064308 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=addons-451841 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:30:23.313095 2064308 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:30:23.348552 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:30:23.348577 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:30:23.348587 2064308 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:30:23.348609 2064308 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:addons-451841 NodeName:addons-451841 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc
/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:30:23.348742 2064308 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "addons-451841"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:30:23.348817 2064308 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:30:23.357634 2064308 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:30:23.357705 2064308 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:30:23.366468 2064308 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:30:23.385942 2064308 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:30:23.404422 2064308 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2167 bytes)
	I0916 10:30:23.422831 2064308 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:30:23.426382 2064308 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:30:23.437337 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:23.533359 2064308 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:30:23.547523 2064308 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841 for IP: 192.168.49.2
	I0916 10:30:23.547546 2064308 certs.go:194] generating shared ca certs ...
	I0916 10:30:23.547562 2064308 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:23.548238 2064308 certs.go:240] generating "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:30:24.056004 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt ...
	I0916 10:30:24.056043 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt: {Name:mk8fa0c4ced40ca68ac874100ce374f588dfea0b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.056261 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key ...
	I0916 10:30:24.056276 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key: {Name:mk04aab579c9f6bfd22c8de7442d64e7264cf4f3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.056381 2064308 certs.go:240] generating "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:30:24.923761 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt ...
	I0916 10:30:24.923793 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt: {Name:mke93617c0d085600c816f9e0c290a24fbe662eb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.923996 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key ...
	I0916 10:30:24.924009 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key: {Name:mk45200538cf11f718e98e7cfef8cbfcd0dafedf Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:24.924099 2064308 certs.go:256] generating profile certs ...
	I0916 10:30:24.924161 2064308 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key
	I0916 10:30:24.924189 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt with IP's: []
	I0916 10:30:25.053524 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt ...
	I0916 10:30:25.053557 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: {Name:mk37fa0b7d204f82c8af039a0f580deae8708ef5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.053750 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key ...
	I0916 10:30:25.053764 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.key: {Name:mkdb13343be22c0a0f72ff55f3a3cbca00768e68 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.053853 2064308 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707
	I0916 10:30:25.053877 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2]
	I0916 10:30:25.726904 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 ...
	I0916 10:30:25.726937 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707: {Name:mkf1dd897eefb9f7916ec8408e62b2271e638207 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.727141 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707 ...
	I0916 10:30:25.727156 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707: {Name:mkfbc7b493bc2e7d0b9e7f941111c820f07e3e82 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:25.727261 2064308 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt.9ccce707 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt
	I0916 10:30:25.727361 2064308 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key.9ccce707 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key
	I0916 10:30:25.727418 2064308 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key
	I0916 10:30:25.727439 2064308 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt with IP's: []
	I0916 10:30:26.011801 2064308 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt ...
	I0916 10:30:26.011842 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt: {Name:mkb21e9e32e986ac8dbc5fbe6c0db427fdb116ee Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:26.012049 2064308 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key ...
	I0916 10:30:26.012065 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key: {Name:mk95b366411d26459b0f1e143cac6384a51d5dfb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:26.012320 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:30:26.012368 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:30:26.012401 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:30:26.012429 2064308 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:30:26.013083 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:30:26.039152 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:30:26.064366 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:30:26.093086 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:30:26.116868 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1419 bytes)
	I0916 10:30:26.141663 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:30:26.166725 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:30:26.191142 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:30:26.214975 2064308 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:30:26.238979 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:30:26.256459 2064308 ssh_runner.go:195] Run: openssl version
	I0916 10:30:26.262089 2064308 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:30:26.271478 2064308 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.274966 2064308 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.275035 2064308 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:30:26.281888 2064308 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:30:26.291290 2064308 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:30:26.294471 2064308 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:30:26.294534 2064308 kubeadm.go:392] StartCluster: {Name:addons-451841 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:addons-451841 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames
:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false Custom
QemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:30:26.294629 2064308 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:30:26.294715 2064308 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:30:26.332648 2064308 cri.go:89] found id: ""
	I0916 10:30:26.332740 2064308 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:30:26.341585 2064308 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:30:26.350524 2064308 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 10:30:26.350588 2064308 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:30:26.359218 2064308 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 10:30:26.359240 2064308 kubeadm.go:157] found existing configuration files:
	
	I0916 10:30:26.359319 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 10:30:26.368227 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 10:30:26.368297 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 10:30:26.377781 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 10:30:26.386494 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 10:30:26.386567 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 10:30:26.394932 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 10:30:26.403622 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 10:30:26.403687 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:30:26.412005 2064308 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 10:30:26.420862 2064308 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 10:30:26.420957 2064308 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:30:26.429543 2064308 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 10:30:26.471767 2064308 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 10:30:26.472019 2064308 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 10:30:26.498827 2064308 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 10:30:26.498904 2064308 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 10:30:26.498947 2064308 kubeadm.go:310] OS: Linux
	I0916 10:30:26.498998 2064308 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 10:30:26.499052 2064308 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 10:30:26.499103 2064308 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 10:30:26.499154 2064308 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 10:30:26.499218 2064308 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 10:30:26.499270 2064308 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 10:30:26.499320 2064308 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 10:30:26.499375 2064308 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 10:30:26.499426 2064308 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 10:30:26.577650 2064308 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 10:30:26.577762 2064308 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 10:30:26.577859 2064308 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 10:30:26.583045 2064308 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 10:30:26.586527 2064308 out.go:235]   - Generating certificates and keys ...
	I0916 10:30:26.586988 2064308 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 10:30:26.587103 2064308 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 10:30:26.754645 2064308 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 10:30:27.554793 2064308 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 10:30:28.039725 2064308 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 10:30:28.690015 2064308 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 10:30:29.764620 2064308 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 10:30:29.764907 2064308 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [addons-451841 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:30:30.341274 2064308 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 10:30:30.342274 2064308 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [addons-451841 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:30:30.576739 2064308 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 10:30:31.765912 2064308 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 10:30:33.601844 2064308 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 10:30:33.602129 2064308 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 10:30:34.584274 2064308 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 10:30:35.213888 2064308 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 10:30:35.990415 2064308 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 10:30:36.165269 2064308 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 10:30:36.564139 2064308 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 10:30:36.565009 2064308 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 10:30:36.568128 2064308 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 10:30:36.570826 2064308 out.go:235]   - Booting up control plane ...
	I0916 10:30:36.570944 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 10:30:36.571026 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 10:30:36.571834 2064308 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 10:30:36.583080 2064308 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 10:30:36.589082 2064308 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 10:30:36.589162 2064308 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 10:30:36.685676 2064308 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 10:30:36.685796 2064308 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 10:30:37.686643 2064308 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00127007s
	I0916 10:30:37.686760 2064308 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 10:30:44.689772 2064308 kubeadm.go:310] [api-check] The API server is healthy after 7.003101119s
	I0916 10:30:44.709044 2064308 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 10:30:44.727931 2064308 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 10:30:44.754458 2064308 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 10:30:44.754737 2064308 kubeadm.go:310] [mark-control-plane] Marking the node addons-451841 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 10:30:44.766739 2064308 kubeadm.go:310] [bootstrap-token] Using token: dx9pov.rexyyitopznv0w4v
	I0916 10:30:44.769416 2064308 out.go:235]   - Configuring RBAC rules ...
	I0916 10:30:44.769548 2064308 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 10:30:44.776785 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 10:30:44.785617 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 10:30:44.789704 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 10:30:44.794016 2064308 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 10:30:44.798127 2064308 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 10:30:45.099673 2064308 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 10:30:45.534575 2064308 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 10:30:46.098271 2064308 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 10:30:46.099422 2064308 kubeadm.go:310] 
	I0916 10:30:46.099510 2064308 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 10:30:46.099519 2064308 kubeadm.go:310] 
	I0916 10:30:46.099624 2064308 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 10:30:46.099640 2064308 kubeadm.go:310] 
	I0916 10:30:46.099673 2064308 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 10:30:46.099733 2064308 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 10:30:46.099783 2064308 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 10:30:46.099787 2064308 kubeadm.go:310] 
	I0916 10:30:46.099841 2064308 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 10:30:46.099846 2064308 kubeadm.go:310] 
	I0916 10:30:46.099898 2064308 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 10:30:46.099903 2064308 kubeadm.go:310] 
	I0916 10:30:46.099959 2064308 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 10:30:46.100036 2064308 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 10:30:46.100108 2064308 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 10:30:46.100113 2064308 kubeadm.go:310] 
	I0916 10:30:46.100201 2064308 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 10:30:46.100280 2064308 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 10:30:46.100285 2064308 kubeadm.go:310] 
	I0916 10:30:46.100377 2064308 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token dx9pov.rexyyitopznv0w4v \
	I0916 10:30:46.100482 2064308 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 10:30:46.100503 2064308 kubeadm.go:310] 	--control-plane 
	I0916 10:30:46.100507 2064308 kubeadm.go:310] 
	I0916 10:30:46.100599 2064308 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 10:30:46.100604 2064308 kubeadm.go:310] 
	I0916 10:30:46.100684 2064308 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token dx9pov.rexyyitopznv0w4v \
	I0916 10:30:46.100792 2064308 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 10:30:46.104209 2064308 kubeadm.go:310] W0916 10:30:26.468492    1025 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:30:46.104508 2064308 kubeadm.go:310] W0916 10:30:26.469422    1025 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:30:46.104733 2064308 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 10:30:46.104841 2064308 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 10:30:46.104863 2064308 cni.go:84] Creating CNI manager for ""
	I0916 10:30:46.104872 2064308 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:30:46.107753 2064308 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:30:46.110419 2064308 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:30:46.114304 2064308 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:30:46.114327 2064308 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:30:46.132060 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 10:30:46.405649 2064308 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:30:46.405772 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:46.405844 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes addons-451841 minikube.k8s.io/updated_at=2024_09_16T10_30_46_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=addons-451841 minikube.k8s.io/primary=true
	I0916 10:30:46.544610 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:46.544668 2064308 ops.go:34] apiserver oom_adj: -16
	I0916 10:30:47.045343 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:47.544713 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:48.045593 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:48.545262 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:49.044804 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:49.545373 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:50.045197 2064308 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:30:50.206616 2064308 kubeadm.go:1113] duration metric: took 3.800886781s to wait for elevateKubeSystemPrivileges
	I0916 10:30:50.206650 2064308 kubeadm.go:394] duration metric: took 23.912135022s to StartCluster
	I0916 10:30:50.206760 2064308 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:50.206888 2064308 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:30:50.207291 2064308 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:30:50.207495 2064308 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:30:50.207664 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 10:30:50.207912 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:50.207954 2064308 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:true csi-hostpath-driver:true dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:true gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:true ingress-dns:true inspektor-gadget:true istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:true nvidia-device-plugin:true nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:true registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:true volcano:true volumesnapshots:true yakd:true]
	I0916 10:30:50.208037 2064308 addons.go:69] Setting yakd=true in profile "addons-451841"
	I0916 10:30:50.208056 2064308 addons.go:234] Setting addon yakd=true in "addons-451841"
	I0916 10:30:50.208079 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.208590 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.209289 2064308 addons.go:69] Setting metrics-server=true in profile "addons-451841"
	I0916 10:30:50.209312 2064308 addons.go:234] Setting addon metrics-server=true in "addons-451841"
	I0916 10:30:50.209362 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.209903 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.210207 2064308 addons.go:69] Setting nvidia-device-plugin=true in profile "addons-451841"
	I0916 10:30:50.210240 2064308 addons.go:234] Setting addon nvidia-device-plugin=true in "addons-451841"
	I0916 10:30:50.210263 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.210767 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.218758 2064308 addons.go:69] Setting registry=true in profile "addons-451841"
	I0916 10:30:50.218798 2064308 addons.go:234] Setting addon registry=true in "addons-451841"
	I0916 10:30:50.218832 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.219427 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.219602 2064308 addons.go:69] Setting cloud-spanner=true in profile "addons-451841"
	I0916 10:30:50.219647 2064308 addons.go:234] Setting addon cloud-spanner=true in "addons-451841"
	I0916 10:30:50.219685 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.221722 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.222266 2064308 addons.go:69] Setting storage-provisioner=true in profile "addons-451841"
	I0916 10:30:50.222288 2064308 addons.go:234] Setting addon storage-provisioner=true in "addons-451841"
	I0916 10:30:50.222314 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.222854 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.243982 2064308 addons.go:69] Setting csi-hostpath-driver=true in profile "addons-451841"
	I0916 10:30:50.244056 2064308 addons.go:234] Setting addon csi-hostpath-driver=true in "addons-451841"
	I0916 10:30:50.244103 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.244878 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.250996 2064308 addons.go:69] Setting storage-provisioner-rancher=true in profile "addons-451841"
	I0916 10:30:50.251033 2064308 addons_storage_classes.go:33] enableOrDisableStorageClasses storage-provisioner-rancher=true on "addons-451841"
	I0916 10:30:50.251403 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.262479 2064308 addons.go:69] Setting volcano=true in profile "addons-451841"
	I0916 10:30:50.262526 2064308 addons.go:234] Setting addon volcano=true in "addons-451841"
	I0916 10:30:50.262567 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.263124 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.310432 2064308 addons.go:69] Setting default-storageclass=true in profile "addons-451841"
	I0916 10:30:50.310537 2064308 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "addons-451841"
	I0916 10:30:50.311117 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.312245 2064308 addons.go:69] Setting volumesnapshots=true in profile "addons-451841"
	I0916 10:30:50.312377 2064308 addons.go:234] Setting addon volumesnapshots=true in "addons-451841"
	I0916 10:30:50.312448 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.313757 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.327865 2064308 addons.go:69] Setting gcp-auth=true in profile "addons-451841"
	I0916 10:30:50.327962 2064308 mustload.go:65] Loading cluster: addons-451841
	I0916 10:30:50.330380 2064308 config.go:182] Loaded profile config "addons-451841": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:30:50.330866 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.331146 2064308 out.go:177] * Verifying Kubernetes components...
	I0916 10:30:50.334941 2064308 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:30:50.349812 2064308 addons.go:69] Setting ingress=true in profile "addons-451841"
	I0916 10:30:50.349850 2064308 addons.go:234] Setting addon ingress=true in "addons-451841"
	I0916 10:30:50.349897 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.350438 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.350648 2064308 addons.go:234] Setting addon storage-provisioner-rancher=true in "addons-451841"
	I0916 10:30:50.350723 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.351151 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.369853 2064308 addons.go:69] Setting ingress-dns=true in profile "addons-451841"
	I0916 10:30:50.369893 2064308 addons.go:234] Setting addon ingress-dns=true in "addons-451841"
	I0916 10:30:50.369937 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.370407 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.375867 2064308 out.go:177]   - Using image docker.io/registry:2.8.3
	I0916 10:30:50.382808 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/kube-registry-proxy:0.0.6
	I0916 10:30:50.384082 2064308 addons.go:69] Setting inspektor-gadget=true in profile "addons-451841"
	I0916 10:30:50.384111 2064308 addons.go:234] Setting addon inspektor-gadget=true in "addons-451841"
	I0916 10:30:50.384143 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.384714 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.421644 2064308 out.go:177]   - Using image registry.k8s.io/metrics-server/metrics-server:v0.7.2
	I0916 10:30:50.424401 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-apiservice.yaml
	I0916 10:30:50.424443 2064308 ssh_runner.go:362] scp metrics-server/metrics-apiservice.yaml --> /etc/kubernetes/addons/metrics-apiservice.yaml (424 bytes)
	I0916 10:30:50.424517 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.438309 2064308 out.go:177]   - Using image nvcr.io/nvidia/k8s-device-plugin:v0.16.2
	I0916 10:30:50.438567 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-rc.yaml
	I0916 10:30:50.438585 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-rc.yaml (860 bytes)
	I0916 10:30:50.438646 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.439842 2064308 out.go:177]   - Using image docker.io/marcnuri/yakd:0.0.5
	I0916 10:30:50.440236 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-webhook-manager:v1.9.0
	I0916 10:30:50.440402 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.462370 2064308 addons.go:234] Setting addon default-storageclass=true in "addons-451841"
	I0916 10:30:50.462409 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:50.463191 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:50.463463 2064308 addons.go:431] installing /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:30:50.466889 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/nvidia-device-plugin.yaml (1966 bytes)
	I0916 10:30:50.467021 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.474834 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-ns.yaml
	I0916 10:30:50.474857 2064308 ssh_runner.go:362] scp yakd/yakd-ns.yaml --> /etc/kubernetes/addons/yakd-ns.yaml (171 bytes)
	I0916 10:30:50.474919 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.484574 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:30:50.485713 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-external-health-monitor-controller:v0.7.0
	I0916 10:30:50.508488 2064308 out.go:177]   - Using image gcr.io/cloud-spanner-emulator/emulator:1.5.23
	I0916 10:30:50.525937 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-controller-manager:v1.9.0
	I0916 10:30:50.526169 2064308 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:30:50.526183 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:30:50.526247 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.542222 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.6.0
	I0916 10:30:50.543269 2064308 addons.go:431] installing /etc/kubernetes/addons/deployment.yaml
	I0916 10:30:50.543418 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/deployment.yaml (1004 bytes)
	I0916 10:30:50.543483 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.563839 2064308 out.go:177]   - Using image docker.io/volcanosh/vc-scheduler:v1.9.0
	I0916 10:30:50.567954 2064308 addons.go:431] installing /etc/kubernetes/addons/volcano-deployment.yaml
	I0916 10:30:50.567983 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volcano-deployment.yaml (434001 bytes)
	I0916 10:30:50.568053 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.583888 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.587279 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/minikube-ingress-dns:0.0.3
	I0916 10:30:50.587486 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/controller:v1.11.2
	I0916 10:30:50.589757 2064308 out.go:177]   - Using image docker.io/busybox:stable
	I0916 10:30:50.589894 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/hostpathplugin:v1.9.0
	I0916 10:30:50.592333 2064308 addons.go:431] installing /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:30:50.592357 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-dns-pod.yaml (2442 bytes)
	I0916 10:30:50.592588 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.594469 2064308 out.go:177]   - Using image docker.io/rancher/local-path-provisioner:v0.0.22
	I0916 10:30:50.594639 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:30:50.596571 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/livenessprobe:v2.8.0
	I0916 10:30:50.596784 2064308 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:30:50.596798 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner-rancher.yaml (3113 bytes)
	I0916 10:30:50.596863 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.628847 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:30:50.631659 2064308 addons.go:431] installing /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:30:50.631684 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ingress-deploy.yaml (16078 bytes)
	I0916 10:30:50.631748 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.645470 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.650239 2064308 out.go:177]   - Using image ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0
	I0916 10:30:50.650364 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-resizer:v1.6.0
	I0916 10:30:50.650401 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/snapshot-controller:v6.1.0
	I0916 10:30:50.652227 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.653039 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-namespace.yaml
	I0916 10:30:50.654718 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-namespace.yaml --> /etc/kubernetes/addons/ig-namespace.yaml (55 bytes)
	I0916 10:30:50.654790 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.661463 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-snapshotter:v6.1.0
	I0916 10:30:50.661708 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.654527 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml
	I0916 10:30:50.662209 2064308 ssh_runner.go:362] scp volumesnapshots/csi-hostpath-snapshotclass.yaml --> /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml (934 bytes)
	I0916 10:30:50.662349 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.674173 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-provisioner:v3.3.0
	I0916 10:30:50.676994 2064308 out.go:177]   - Using image registry.k8s.io/sig-storage/csi-attacher:v4.0.0
	I0916 10:30:50.680275 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-attacher.yaml
	I0916 10:30:50.680305 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-attacher.yaml --> /etc/kubernetes/addons/rbac-external-attacher.yaml (3073 bytes)
	I0916 10:30:50.680378 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.691037 2064308 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:30:50.691057 2064308 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:30:50.691123 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:50.774282 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.780046 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.807312 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.827826 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.831006 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.853363 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.867169 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.875051 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.875081 2064308 retry.go:31] will retry after 209.079202ms: ssh: handshake failed: EOF
	I0916 10:30:50.875514 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.878034 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.878076 2064308 retry.go:31] will retry after 358.329045ms: ssh: handshake failed: EOF
	I0916 10:30:50.878970 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:50.891671 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	W0916 10:30:50.913115 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:50.913144 2064308 retry.go:31] will retry after 291.220359ms: ssh: handshake failed: EOF
	W0916 10:30:51.085514 2064308 sshutil.go:64] dial failure (will retry): ssh: handshake failed: EOF
	I0916 10:30:51.085558 2064308 retry.go:31] will retry after 406.090408ms: ssh: handshake failed: EOF
	I0916 10:30:51.380959 2064308 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml": (1.173254923s)
	I0916 10:30:51.381043 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml
	I0916 10:30:51.381158 2064308 ssh_runner.go:235] Completed: sudo systemctl daemon-reload: (1.046146925s)
	I0916 10:30:51.381191 2064308 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 10:30:51.381193 2064308 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:30:51.393457 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-serviceaccount.yaml
	I0916 10:30:51.393478 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-serviceaccount.yaml --> /etc/kubernetes/addons/ig-serviceaccount.yaml (80 bytes)
	I0916 10:30:51.405074 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:30:51.536141 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml
	I0916 10:30:51.553523 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-hostpath.yaml
	I0916 10:30:51.553553 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-hostpath.yaml --> /etc/kubernetes/addons/rbac-hostpath.yaml (4266 bytes)
	I0916 10:30:51.664299 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-role.yaml
	I0916 10:30:51.664331 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-role.yaml --> /etc/kubernetes/addons/ig-role.yaml (210 bytes)
	I0916 10:30:51.694553 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-sa.yaml
	I0916 10:30:51.694580 2064308 ssh_runner.go:362] scp yakd/yakd-sa.yaml --> /etc/kubernetes/addons/yakd-sa.yaml (247 bytes)
	I0916 10:30:51.695380 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml
	I0916 10:30:51.703369 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-svc.yaml
	I0916 10:30:51.703394 2064308 ssh_runner.go:362] scp registry/registry-svc.yaml --> /etc/kubernetes/addons/registry-svc.yaml (398 bytes)
	I0916 10:30:51.711436 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-deployment.yaml
	I0916 10:30:51.711460 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/metrics-server-deployment.yaml (1907 bytes)
	I0916 10:30:51.716209 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml
	I0916 10:30:51.833745 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml
	I0916 10:30:51.872114 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-rbac.yaml
	I0916 10:30:51.872156 2064308 ssh_runner.go:362] scp metrics-server/metrics-server-rbac.yaml --> /etc/kubernetes/addons/metrics-server-rbac.yaml (2175 bytes)
	I0916 10:30:51.879573 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml
	I0916 10:30:51.879603 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-health-monitor-controller.yaml --> /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml (3038 bytes)
	I0916 10:30:51.894115 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:30:51.927534 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-rolebinding.yaml
	I0916 10:30:51.927573 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-rolebinding.yaml --> /etc/kubernetes/addons/ig-rolebinding.yaml (244 bytes)
	I0916 10:30:51.967967 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-crb.yaml
	I0916 10:30:51.967997 2064308 ssh_runner.go:362] scp yakd/yakd-crb.yaml --> /etc/kubernetes/addons/yakd-crb.yaml (422 bytes)
	I0916 10:30:51.987647 2064308 addons.go:431] installing /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:30:51.987672 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/registry-proxy.yaml (947 bytes)
	I0916 10:30:52.018799 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml
	I0916 10:30:52.018835 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotclasses.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml (6471 bytes)
	I0916 10:30:52.040829 2064308 addons.go:431] installing /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:30:52.040863 2064308 ssh_runner.go:362] scp metrics-server/metrics-server-service.yaml --> /etc/kubernetes/addons/metrics-server-service.yaml (446 bytes)
	I0916 10:30:52.062309 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-provisioner.yaml
	I0916 10:30:52.062358 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-provisioner.yaml --> /etc/kubernetes/addons/rbac-external-provisioner.yaml (4442 bytes)
	I0916 10:30:52.141020 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrole.yaml
	I0916 10:30:52.141055 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrole.yaml --> /etc/kubernetes/addons/ig-clusterrole.yaml (1485 bytes)
	I0916 10:30:52.150339 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml
	I0916 10:30:52.156143 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-svc.yaml
	I0916 10:30:52.156182 2064308 ssh_runner.go:362] scp yakd/yakd-svc.yaml --> /etc/kubernetes/addons/yakd-svc.yaml (412 bytes)
	I0916 10:30:52.267727 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-resizer.yaml
	I0916 10:30:52.267754 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-resizer.yaml --> /etc/kubernetes/addons/rbac-external-resizer.yaml (2943 bytes)
	I0916 10:30:52.278358 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml
	I0916 10:30:52.278410 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshotcontents.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml (23126 bytes)
	I0916 10:30:52.295260 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml
	I0916 10:30:52.315396 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml
	I0916 10:30:52.396988 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-clusterrolebinding.yaml
	I0916 10:30:52.397029 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-clusterrolebinding.yaml --> /etc/kubernetes/addons/ig-clusterrolebinding.yaml (274 bytes)
	I0916 10:30:52.414014 2064308 addons.go:431] installing /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:30:52.414040 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/yakd-dp.yaml (2017 bytes)
	I0916 10:30:52.514419 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-external-snapshotter.yaml
	I0916 10:30:52.514447 2064308 ssh_runner.go:362] scp csi-hostpath-driver/rbac/rbac-external-snapshotter.yaml --> /etc/kubernetes/addons/rbac-external-snapshotter.yaml (3149 bytes)
	I0916 10:30:52.534199 2064308 addons.go:431] installing /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml
	I0916 10:30:52.534239 2064308 ssh_runner.go:362] scp volumesnapshots/snapshot.storage.k8s.io_volumesnapshots.yaml --> /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml (19582 bytes)
	I0916 10:30:52.671597 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/nvidia-device-plugin.yaml: (1.290515457s)
	I0916 10:30:52.715081 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-crd.yaml
	I0916 10:30:52.715111 2064308 ssh_runner.go:362] scp inspektor-gadget/ig-crd.yaml --> /etc/kubernetes/addons/ig-crd.yaml (5216 bytes)
	I0916 10:30:52.719191 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml
	I0916 10:30:52.832284 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-attacher.yaml
	I0916 10:30:52.832313 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-attacher.yaml (2143 bytes)
	I0916 10:30:52.924488 2064308 addons.go:431] installing /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml
	I0916 10:30:52.924521 2064308 ssh_runner.go:362] scp volumesnapshots/rbac-volume-snapshot-controller.yaml --> /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml (3545 bytes)
	I0916 10:30:53.168769 2064308 addons.go:431] installing /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:30:53.168802 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/ig-daemonset.yaml (7735 bytes)
	I0916 10:30:53.177620 2064308 ssh_runner.go:235] Completed: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -": (1.796357981s)
	I0916 10:30:53.177657 2064308 start.go:971] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
	I0916 10:30:53.177732 2064308 ssh_runner.go:235] Completed: sudo systemctl start kubelet: (1.79643144s)
	I0916 10:30:53.179507 2064308 node_ready.go:35] waiting up to 6m0s for node "addons-451841" to be "Ready" ...
	I0916 10:30:53.184404 2064308 node_ready.go:49] node "addons-451841" has status "Ready":"True"
	I0916 10:30:53.184443 2064308 node_ready.go:38] duration metric: took 4.710029ms for node "addons-451841" to be "Ready" ...
	I0916 10:30:53.184458 2064308 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:30:53.197525 2064308 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace to be "Ready" ...
	I0916 10:30:53.269899 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml
	I0916 10:30:53.269941 2064308 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-driverinfo.yaml --> /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml (1274 bytes)
	I0916 10:30:53.282557 2064308 addons.go:431] installing /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:30:53.282590 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml (1475 bytes)
	I0916 10:30:53.466493 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-plugin.yaml
	I0916 10:30:53.466519 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-plugin.yaml (8201 bytes)
	I0916 10:30:53.471643 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml
	I0916 10:30:53.602578 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:30:53.683185 2064308 kapi.go:214] "coredns" deployment in "kube-system" namespace and "addons-451841" context rescaled to 1 replicas
	I0916 10:30:53.701295 2064308 pod_ready.go:98] error getting pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bvmss" not found
	I0916 10:30:53.701323 2064308 pod_ready.go:82] duration metric: took 503.765362ms for pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace to be "Ready" ...
	E0916 10:30:53.701335 2064308 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "coredns-7c65d6cfc9-bvmss" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bvmss" not found
	I0916 10:30:53.701342 2064308 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace to be "Ready" ...
	I0916 10:30:53.722187 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-resizer.yaml
	I0916 10:30:53.722214 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/csi-hostpath-resizer.yaml (2191 bytes)
	I0916 10:30:54.162813 2064308 addons.go:431] installing /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:30:54.162856 2064308 ssh_runner.go:362] scp csi-hostpath-driver/deploy/csi-hostpath-storageclass.yaml --> /etc/kubernetes/addons/csi-hostpath-storageclass.yaml (846 bytes)
	I0916 10:30:54.507449 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml
	I0916 10:30:55.304651 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (3.899501244s)
	I0916 10:30:55.634996 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-dns-pod.yaml: (3.939582165s)
	I0916 10:30:55.635110 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner-rancher.yaml: (4.098941534s)
	I0916 10:30:55.711983 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:30:57.666996 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_application_credentials.json (162 bytes)
	I0916 10:30:57.667089 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:57.696419 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:57.712916 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:30:58.304674 2064308 ssh_runner.go:362] scp memory --> /var/lib/minikube/google_cloud_project (12 bytes)
	I0916 10:30:58.423037 2064308 addons.go:234] Setting addon gcp-auth=true in "addons-451841"
	I0916 10:30:58.423145 2064308 host.go:66] Checking if "addons-451841" exists ...
	I0916 10:30:58.423647 2064308 cli_runner.go:164] Run: docker container inspect addons-451841 --format={{.State.Status}}
	I0916 10:30:58.453963 2064308 ssh_runner.go:195] Run: cat /var/lib/minikube/google_application_credentials.json
	I0916 10:30:58.454022 2064308 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" addons-451841
	I0916 10:30:58.488418 2064308 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40577 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/addons-451841/id_rsa Username:docker}
	I0916 10:30:59.724111 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:01.085964 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/volcano-deployment.yaml: (9.369716418s)
	I0916 10:31:01.086088 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/deployment.yaml: (9.252309206s)
	I0916 10:31:01.086143 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml: (9.191998071s)
	I0916 10:31:01.086179 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/registry-rc.yaml -f /etc/kubernetes/addons/registry-svc.yaml -f /etc/kubernetes/addons/registry-proxy.yaml: (8.93580956s)
	I0916 10:31:01.086966 2064308 addons.go:475] Verifying addon registry=true in "addons-451841"
	I0916 10:31:01.086280 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ingress-deploy.yaml: (8.790993869s)
	I0916 10:31:01.087161 2064308 addons.go:475] Verifying addon ingress=true in "addons-451841"
	I0916 10:31:01.086364 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/yakd-ns.yaml -f /etc/kubernetes/addons/yakd-sa.yaml -f /etc/kubernetes/addons/yakd-crb.yaml -f /etc/kubernetes/addons/yakd-svc.yaml -f /etc/kubernetes/addons/yakd-dp.yaml: (8.367136002s)
	I0916 10:31:01.086423 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/ig-namespace.yaml -f /etc/kubernetes/addons/ig-serviceaccount.yaml -f /etc/kubernetes/addons/ig-role.yaml -f /etc/kubernetes/addons/ig-rolebinding.yaml -f /etc/kubernetes/addons/ig-clusterrole.yaml -f /etc/kubernetes/addons/ig-clusterrolebinding.yaml -f /etc/kubernetes/addons/ig-crd.yaml -f /etc/kubernetes/addons/ig-daemonset.yaml: (7.614752608s)
	I0916 10:31:01.086494 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (7.483872887s)
	I0916 10:31:01.086607 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/metrics-apiservice.yaml -f /etc/kubernetes/addons/metrics-server-deployment.yaml -f /etc/kubernetes/addons/metrics-server-rbac.yaml -f /etc/kubernetes/addons/metrics-server-service.yaml: (8.771185269s)
	I0916 10:31:01.087690 2064308 addons.go:475] Verifying addon metrics-server=true in "addons-451841"
	W0916 10:31:01.087784 2064308 addons.go:457] apply failed, will retry: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:31:01.087807 2064308 retry.go:31] will retry after 241.995667ms: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: Process exited with status 1
	stdout:
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotclasses.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshotcontents.snapshot.storage.k8s.io created
	customresourcedefinition.apiextensions.k8s.io/volumesnapshots.snapshot.storage.k8s.io created
	serviceaccount/snapshot-controller created
	clusterrole.rbac.authorization.k8s.io/snapshot-controller-runner created
	clusterrolebinding.rbac.authorization.k8s.io/snapshot-controller-role created
	role.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	rolebinding.rbac.authorization.k8s.io/snapshot-controller-leaderelection created
	deployment.apps/snapshot-controller created
	
	stderr:
	error: resource mapping not found for name: "csi-hostpath-snapclass" namespace: "" from "/etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml": no matches for kind "VolumeSnapshotClass" in version "snapshot.storage.k8s.io/v1"
	ensure CRDs are installed first
	I0916 10:31:01.089709 2064308 out.go:177] * To access YAKD - Kubernetes Dashboard, wait for Pod to be ready and run the following command:
	
		minikube -p addons-451841 service yakd-dashboard -n yakd-dashboard
	
	I0916 10:31:01.089714 2064308 out.go:177] * Verifying ingress addon...
	I0916 10:31:01.089782 2064308 out.go:177] * Verifying registry addon...
	I0916 10:31:01.092615 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=registry" in ns "kube-system" ...
	I0916 10:31:01.093670 2064308 kapi.go:75] Waiting for pod with label "app.kubernetes.io/name=ingress-nginx" in ns "ingress-nginx" ...
	I0916 10:31:01.146629 2064308 kapi.go:86] Found 3 Pods for label selector app.kubernetes.io/name=ingress-nginx
	I0916 10:31:01.146661 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:01.147988 2064308 kapi.go:86] Found 2 Pods for label selector kubernetes.io/minikube-addons=registry
	I0916 10:31:01.148013 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:01.330778 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml
	I0916 10:31:01.607432 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:01.608116 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:01.783267 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/rbac-external-attacher.yaml -f /etc/kubernetes/addons/rbac-hostpath.yaml -f /etc/kubernetes/addons/rbac-external-health-monitor-controller.yaml -f /etc/kubernetes/addons/rbac-external-provisioner.yaml -f /etc/kubernetes/addons/rbac-external-resizer.yaml -f /etc/kubernetes/addons/rbac-external-snapshotter.yaml -f /etc/kubernetes/addons/csi-hostpath-attacher.yaml -f /etc/kubernetes/addons/csi-hostpath-driverinfo.yaml -f /etc/kubernetes/addons/csi-hostpath-plugin.yaml -f /etc/kubernetes/addons/csi-hostpath-resizer.yaml -f /etc/kubernetes/addons/csi-hostpath-storageclass.yaml: (7.275754054s)
	I0916 10:31:01.783417 2064308 addons.go:475] Verifying addon csi-hostpath-driver=true in "addons-451841"
	I0916 10:31:01.783367 2064308 ssh_runner.go:235] Completed: cat /var/lib/minikube/google_application_credentials.json: (3.329378043s)
	I0916 10:31:01.785766 2064308 out.go:177]   - Using image registry.k8s.io/ingress-nginx/kube-webhook-certgen:v1.4.3
	I0916 10:31:01.785796 2064308 out.go:177] * Verifying csi-hostpath-driver addon...
	I0916 10:31:01.788664 2064308 out.go:177]   - Using image gcr.io/k8s-minikube/gcp-auth-webhook:v0.1.2
	I0916 10:31:01.789894 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
	I0916 10:31:01.794958 2064308 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
	I0916 10:31:01.795006 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:01.797295 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-ns.yaml
	I0916 10:31:01.797332 2064308 ssh_runner.go:362] scp gcp-auth/gcp-auth-ns.yaml --> /etc/kubernetes/addons/gcp-auth-ns.yaml (700 bytes)
	I0916 10:31:01.893997 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-service.yaml
	I0916 10:31:01.894071 2064308 ssh_runner.go:362] scp gcp-auth/gcp-auth-service.yaml --> /etc/kubernetes/addons/gcp-auth-service.yaml (788 bytes)
	I0916 10:31:01.937742 2064308 addons.go:431] installing /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:31:01.937810 2064308 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/gcp-auth-webhook.yaml (5421 bytes)
	I0916 10:31:01.987240 2064308 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml
	I0916 10:31:02.097286 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:02.100875 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:02.209340 2064308 pod_ready.go:103] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:02.305635 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:02.597370 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:02.599723 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:02.795942 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:03.100397 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:03.103196 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:03.312002 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:03.374850 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/gcp-auth-ns.yaml -f /etc/kubernetes/addons/gcp-auth-service.yaml -f /etc/kubernetes/addons/gcp-auth-webhook.yaml: (1.387566006s)
	I0916 10:31:03.375988 2064308 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply --force -f /etc/kubernetes/addons/csi-hostpath-snapshotclass.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotclasses.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshotcontents.yaml -f /etc/kubernetes/addons/snapshot.storage.k8s.io_volumesnapshots.yaml -f /etc/kubernetes/addons/rbac-volume-snapshot-controller.yaml -f /etc/kubernetes/addons/volume-snapshot-controller-deployment.yaml: (2.045090366s)
	I0916 10:31:03.378127 2064308 addons.go:475] Verifying addon gcp-auth=true in "addons-451841"
	I0916 10:31:03.381777 2064308 out.go:177] * Verifying gcp-auth addon...
	I0916 10:31:03.384298 2064308 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=gcp-auth" in ns "gcp-auth" ...
	I0916 10:31:03.406084 2064308 kapi.go:86] Found 0 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:31:03.599867 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:03.600481 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:03.720241 2064308 pod_ready.go:93] pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.720276 2064308 pod_ready.go:82] duration metric: took 10.018926311s for pod "coredns-7c65d6cfc9-jqthn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.720289 2064308 pod_ready.go:79] waiting up to 6m0s for pod "etcd-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.746042 2064308 pod_ready.go:93] pod "etcd-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.746067 2064308 pod_ready.go:82] duration metric: took 25.771231ms for pod "etcd-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.746081 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.752533 2064308 pod_ready.go:93] pod "kube-apiserver-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.752559 2064308 pod_ready.go:82] duration metric: took 6.470582ms for pod "kube-apiserver-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.752571 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.758462 2064308 pod_ready.go:93] pod "kube-controller-manager-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.758495 2064308 pod_ready.go:82] duration metric: took 5.916018ms for pod "kube-controller-manager-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.758507 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-tltkn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.765336 2064308 pod_ready.go:93] pod "kube-proxy-tltkn" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:03.765369 2064308 pod_ready.go:82] duration metric: took 6.854119ms for pod "kube-proxy-tltkn" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.765382 2064308 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:03.795811 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:04.099344 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:04.100673 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:04.104903 2064308 pod_ready.go:93] pod "kube-scheduler-addons-451841" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:04.104972 2064308 pod_ready.go:82] duration metric: took 339.581954ms for pod "kube-scheduler-addons-451841" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:04.104999 2064308 pod_ready.go:79] waiting up to 6m0s for pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:04.295860 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:04.598910 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:04.602815 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:04.795954 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:05.100224 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:05.101534 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:05.296166 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:05.599439 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:05.601426 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:05.795442 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:06.102393 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:06.103036 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:06.122130 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:06.299045 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:06.599932 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:06.601206 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:06.814263 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:07.096848 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:07.101223 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:07.295217 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:07.599444 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:07.600431 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:07.795082 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:08.101892 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:08.102976 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:08.296014 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:08.598395 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:08.598643 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:08.613020 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:08.795739 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:09.103941 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:09.104967 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:09.295694 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:09.599659 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:09.601180 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:09.796354 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:10.098446 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:10.099577 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:10.295198 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:10.597281 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:10.599286 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:10.616287 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:10.795720 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:11.097801 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:11.099342 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:11.297048 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:11.599247 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:11.599974 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:11.794513 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:12.097432 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:12.099058 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:12.295097 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:12.598578 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:12.599897 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:12.796822 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:13.096898 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:13.098940 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:13.112547 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:13.295802 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:13.599642 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:13.600761 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:13.794583 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:14.096452 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:14.098429 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:14.297517 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:14.598010 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:14.599983 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:14.795140 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:15.104125 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:15.104975 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:15.113778 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:15.295679 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:15.598018 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:15.598555 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:15.795791 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:16.096811 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:16.099236 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:16.296057 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:16.597945 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:16.599646 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:16.797262 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:17.098985 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:17.099689 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:17.295469 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:17.599269 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:17.600683 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:17.611951 2064308 pod_ready.go:103] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"False"
	I0916 10:31:17.794427 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:18.099862 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:18.101710 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:18.296191 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:18.596772 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:18.600049 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:18.811403 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:19.098130 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:19.099143 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:19.111509 2064308 pod_ready.go:93] pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace has status "Ready":"True"
	I0916 10:31:19.111570 2064308 pod_ready.go:82] duration metric: took 15.006549742s for pod "nvidia-device-plugin-daemonset-l6r5c" in "kube-system" namespace to be "Ready" ...
	I0916 10:31:19.111587 2064308 pod_ready.go:39] duration metric: took 25.927112572s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:31:19.111604 2064308 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:31:19.111670 2064308 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:31:19.162518 2064308 api_server.go:72] duration metric: took 28.954985289s to wait for apiserver process to appear ...
	I0916 10:31:19.162546 2064308 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:31:19.162572 2064308 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:31:19.179642 2064308 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:31:19.180628 2064308 api_server.go:141] control plane version: v1.31.1
	I0916 10:31:19.180658 2064308 api_server.go:131] duration metric: took 18.103285ms to wait for apiserver health ...
	I0916 10:31:19.180668 2064308 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:31:19.201200 2064308 system_pods.go:59] 18 kube-system pods found
	I0916 10:31:19.201280 2064308 system_pods.go:61] "coredns-7c65d6cfc9-jqthn" [bc44b698-a863-4ab8-85d8-5bd54d141bd3] Running
	I0916 10:31:19.201299 2064308 system_pods.go:61] "csi-hostpath-attacher-0" [b2e317f7-5d09-4cd6-823d-cc849d3bf9e2] Running
	I0916 10:31:19.201316 2064308 system_pods.go:61] "csi-hostpath-resizer-0" [1d31c631-4247-4ffe-8a87-9b11803bc389] Running
	I0916 10:31:19.201350 2064308 system_pods.go:61] "csi-hostpathplugin-r28vj" [ad580f65-4a4a-445c-bec7-b518245397ea] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0916 10:31:19.201372 2064308 system_pods.go:61] "etcd-addons-451841" [1a9d168e-83c3-4fc0-b188-1da688352b51] Running
	I0916 10:31:19.201392 2064308 system_pods.go:61] "kindnet-zckxr" [b831bf61-6a35-4681-afe4-f5a0f875a7b5] Running
	I0916 10:31:19.201409 2064308 system_pods.go:61] "kube-apiserver-addons-451841" [db38aff8-23ff-466f-a1b0-ab5f2041183c] Running
	I0916 10:31:19.201425 2064308 system_pods.go:61] "kube-controller-manager-addons-451841" [53d22d78-137d-4306-b8df-d9a55061c9df] Running
	I0916 10:31:19.201458 2064308 system_pods.go:61] "kube-ingress-dns-minikube" [c77fac29-39b0-4e39-94f6-a72ac395b130] Running
	I0916 10:31:19.201483 2064308 system_pods.go:61] "kube-proxy-tltkn" [073e7c9c-1ec1-45db-9603-68862d546266] Running
	I0916 10:31:19.201501 2064308 system_pods.go:61] "kube-scheduler-addons-451841" [5b953c55-7b45-4221-89bf-177d1a4efd05] Running
	I0916 10:31:19.201520 2064308 system_pods.go:61] "metrics-server-84c5f94fbc-q47pm" [2252baaa-acbc-43dd-b38e-e8cd59ac7825] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0916 10:31:19.201537 2064308 system_pods.go:61] "nvidia-device-plugin-daemonset-l6r5c" [9645617a-ab29-4c4e-a4ec-4ea217ffb5fb] Running
	I0916 10:31:19.201566 2064308 system_pods.go:61] "registry-66c9cd494c-l957b" [17af08bf-9965-4bec-8d1b-0c4c37167ac1] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
	I0916 10:31:19.201591 2064308 system_pods.go:61] "registry-proxy-9cpxl" [10361d7a-9abb-41e6-88eb-2194d01e1301] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0916 10:31:19.201613 2064308 system_pods.go:61] "snapshot-controller-56fcc65765-6llf9" [7febb7b8-15ce-499d-83f7-b82e03e35902] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.201634 2064308 system_pods.go:61] "snapshot-controller-56fcc65765-qxvll" [0a01a9d7-31e0-4965-baba-078f44b17fac] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.201663 2064308 system_pods.go:61] "storage-provisioner" [6a86a07f-e25d-4ac9-9c07-dc9ae0048083] Running
	I0916 10:31:19.201686 2064308 system_pods.go:74] duration metric: took 21.010389ms to wait for pod list to return data ...
	I0916 10:31:19.201707 2064308 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:31:19.204845 2064308 default_sa.go:45] found service account: "default"
	I0916 10:31:19.204868 2064308 default_sa.go:55] duration metric: took 3.144001ms for default service account to be created ...
	I0916 10:31:19.204877 2064308 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:31:19.219489 2064308 system_pods.go:86] 18 kube-system pods found
	I0916 10:31:19.219563 2064308 system_pods.go:89] "coredns-7c65d6cfc9-jqthn" [bc44b698-a863-4ab8-85d8-5bd54d141bd3] Running
	I0916 10:31:19.219586 2064308 system_pods.go:89] "csi-hostpath-attacher-0" [b2e317f7-5d09-4cd6-823d-cc849d3bf9e2] Running
	I0916 10:31:19.219605 2064308 system_pods.go:89] "csi-hostpath-resizer-0" [1d31c631-4247-4ffe-8a87-9b11803bc389] Running
	I0916 10:31:19.219640 2064308 system_pods.go:89] "csi-hostpathplugin-r28vj" [ad580f65-4a4a-445c-bec7-b518245397ea] Pending / Ready:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter]) / ContainersReady:ContainersNotReady (containers with unready status: [csi-external-health-monitor-controller node-driver-registrar hostpath liveness-probe csi-provisioner csi-snapshotter])
	I0916 10:31:19.219669 2064308 system_pods.go:89] "etcd-addons-451841" [1a9d168e-83c3-4fc0-b188-1da688352b51] Running
	I0916 10:31:19.219690 2064308 system_pods.go:89] "kindnet-zckxr" [b831bf61-6a35-4681-afe4-f5a0f875a7b5] Running
	I0916 10:31:19.219710 2064308 system_pods.go:89] "kube-apiserver-addons-451841" [db38aff8-23ff-466f-a1b0-ab5f2041183c] Running
	I0916 10:31:19.219728 2064308 system_pods.go:89] "kube-controller-manager-addons-451841" [53d22d78-137d-4306-b8df-d9a55061c9df] Running
	I0916 10:31:19.219766 2064308 system_pods.go:89] "kube-ingress-dns-minikube" [c77fac29-39b0-4e39-94f6-a72ac395b130] Running
	I0916 10:31:19.219784 2064308 system_pods.go:89] "kube-proxy-tltkn" [073e7c9c-1ec1-45db-9603-68862d546266] Running
	I0916 10:31:19.219799 2064308 system_pods.go:89] "kube-scheduler-addons-451841" [5b953c55-7b45-4221-89bf-177d1a4efd05] Running
	I0916 10:31:19.219819 2064308 system_pods.go:89] "metrics-server-84c5f94fbc-q47pm" [2252baaa-acbc-43dd-b38e-e8cd59ac7825] Pending / Ready:ContainersNotReady (containers with unready status: [metrics-server]) / ContainersReady:ContainersNotReady (containers with unready status: [metrics-server])
	I0916 10:31:19.219847 2064308 system_pods.go:89] "nvidia-device-plugin-daemonset-l6r5c" [9645617a-ab29-4c4e-a4ec-4ea217ffb5fb] Running
	I0916 10:31:19.219875 2064308 system_pods.go:89] "registry-66c9cd494c-l957b" [17af08bf-9965-4bec-8d1b-0c4c37167ac1] Pending / Ready:ContainersNotReady (containers with unready status: [registry]) / ContainersReady:ContainersNotReady (containers with unready status: [registry])
	I0916 10:31:19.219896 2064308 system_pods.go:89] "registry-proxy-9cpxl" [10361d7a-9abb-41e6-88eb-2194d01e1301] Pending / Ready:ContainersNotReady (containers with unready status: [registry-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [registry-proxy])
	I0916 10:31:19.219915 2064308 system_pods.go:89] "snapshot-controller-56fcc65765-6llf9" [7febb7b8-15ce-499d-83f7-b82e03e35902] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.219935 2064308 system_pods.go:89] "snapshot-controller-56fcc65765-qxvll" [0a01a9d7-31e0-4965-baba-078f44b17fac] Pending / Ready:ContainersNotReady (containers with unready status: [volume-snapshot-controller]) / ContainersReady:ContainersNotReady (containers with unready status: [volume-snapshot-controller])
	I0916 10:31:19.219968 2064308 system_pods.go:89] "storage-provisioner" [6a86a07f-e25d-4ac9-9c07-dc9ae0048083] Running
	I0916 10:31:19.219989 2064308 system_pods.go:126] duration metric: took 15.104177ms to wait for k8s-apps to be running ...
	I0916 10:31:19.220008 2064308 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:31:19.220090 2064308 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:31:19.263162 2064308 system_svc.go:56] duration metric: took 43.144676ms WaitForService to wait for kubelet
	I0916 10:31:19.263243 2064308 kubeadm.go:582] duration metric: took 29.055714708s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:31:19.263279 2064308 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:31:19.272478 2064308 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:31:19.272561 2064308 node_conditions.go:123] node cpu capacity is 2
	I0916 10:31:19.272591 2064308 node_conditions.go:105] duration metric: took 9.29091ms to run NodePressure ...
	I0916 10:31:19.272616 2064308 start.go:241] waiting for startup goroutines ...
	I0916 10:31:19.305039 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:19.605207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:19.605801 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:19.797193 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:20.099691 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:20.101048 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:20.295291 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:20.597682 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:20.598569 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:20.797887 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:21.096766 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:21.099258 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:21.294755 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:21.597973 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:21.600238 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:21.803444 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:22.097870 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:22.100851 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:22.295006 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:22.597700 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:22.598742 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:22.795839 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:23.096175 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:23.098155 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:23.294814 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:23.596166 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:23.598634 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:23.795172 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:24.096643 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:24.099715 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:24.297255 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:24.598721 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:24.599933 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:24.795260 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:25.098369 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:25.101032 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:25.295093 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:25.597734 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:25.597966 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:25.795323 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:26.096041 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:26.099677 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:26.295063 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:26.597593 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:26.599159 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:26.795825 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:27.098811 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:27.099453 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:27.295012 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:27.597182 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=registry", current state: Pending: [<nil>]
	I0916 10:31:27.601645 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:27.795056 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:28.128064 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:28.129640 2064308 kapi.go:107] duration metric: took 27.037023988s to wait for kubernetes.io/minikube-addons=registry ...
	I0916 10:31:28.325425 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:28.598623 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:28.795615 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:29.104511 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:29.295646 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:29.598962 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:29.795067 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:30.099851 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:30.296647 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:30.598332 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:30.796058 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:31.099992 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:31.294874 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:31.598117 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:31.796531 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:32.098393 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:32.295287 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:32.598055 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:32.795217 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:33.099311 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:33.295339 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:33.598188 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:33.795029 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:34.098345 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:34.295712 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:34.598442 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:34.795386 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:35.098874 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:35.295415 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:35.598136 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:35.795586 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:36.098658 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:36.294379 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:36.598764 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:36.795529 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:37.098523 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:37.296711 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:37.601252 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:37.799472 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:38.100971 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:38.298686 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:38.599535 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:38.795481 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:39.098734 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:39.296827 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:39.611876 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:39.851830 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:40.108718 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:40.295843 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:40.599050 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:40.795575 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:41.098568 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:41.295957 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:41.598039 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:41.796038 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:42.099484 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:42.295707 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:42.598887 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:42.795416 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:43.099107 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:43.295766 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:43.599999 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:43.795242 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:44.098395 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:44.295957 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:44.600054 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:44.794470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:45.100863 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:45.295685 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:45.599065 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:45.798514 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:46.099116 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:46.296057 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:46.599389 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:46.796585 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:47.099083 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:47.296145 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:47.598490 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:47.797079 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:48.100448 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:48.295294 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:48.598227 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:48.794662 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:49.119185 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:49.295351 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:49.598797 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:49.794962 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=csi-hostpath-driver", current state: Pending: [<nil>]
	I0916 10:31:50.098374 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:50.295501 2064308 kapi.go:107] duration metric: took 48.505612662s to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
	I0916 10:31:50.598550 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:51.098277 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:51.598976 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:52.098206 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:52.597960 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:53.098585 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:53.598884 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:54.098582 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:54.598852 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:55.098478 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:55.598212 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:56.098517 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:56.598412 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:57.098499 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:57.598710 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:58.097637 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:58.599134 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:59.098778 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:31:59.598318 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:00.130067 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:00.598955 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:01.098901 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:01.598465 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:02.098925 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:02.598148 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:03.102570 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:03.598295 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:04.099028 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:04.598994 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:05.100186 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:05.598454 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:06.098931 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:06.598336 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:07.098800 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:07.599302 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:08.098401 2064308 kapi.go:96] waiting for pod "app.kubernetes.io/name=ingress-nginx", current state: Pending: [<nil>]
	I0916 10:32:08.599190 2064308 kapi.go:107] duration metric: took 1m7.505513413s to wait for app.kubernetes.io/name=ingress-nginx ...
	I0916 10:32:25.388811 2064308 kapi.go:86] Found 1 Pods for label selector kubernetes.io/minikube-addons=gcp-auth
	I0916 10:32:25.388836 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:25.888825 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:26.388022 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:26.887847 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:27.387834 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:27.888795 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:28.387767 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:28.887542 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:29.388486 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:29.888784 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:30.387676 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:30.888490 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:31.388236 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:31.888242 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:32.387732 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:32.888206 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:33.387868 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:33.887962 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:34.387683 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:34.889279 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:35.388145 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:35.887555 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:36.389045 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:36.887848 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:37.388742 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:37.888016 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:38.388211 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:38.887716 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:39.388708 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:39.888575 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:40.388841 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:40.888385 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:41.388668 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:41.887792 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:42.388021 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:42.888125 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:43.388320 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:43.887796 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:44.388101 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:44.888791 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:45.391207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:45.888190 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:46.387869 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:46.887554 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:47.388470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:47.888177 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:48.387903 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:48.888232 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:49.388449 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:49.888527 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:50.388650 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:50.888495 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:51.388590 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:51.888197 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:52.387563 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:52.888238 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:53.388322 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:53.887557 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:54.388664 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:54.888836 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:55.388171 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:55.888180 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:56.388322 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:56.888567 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:57.388117 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:57.887422 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:58.388230 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:58.887872 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:59.396878 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:32:59.888550 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:00.394252 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:00.887612 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:01.392523 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:01.888091 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:02.393207 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:02.887610 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:03.388745 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:03.888344 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:04.388999 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:04.889012 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:05.390448 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:05.888198 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:06.395413 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:06.889275 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:07.387879 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:07.888183 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:08.388311 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:08.888612 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:09.388334 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:09.887931 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:10.387765 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:10.888317 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:11.388557 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:11.887439 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:12.388213 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:12.887810 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:13.388135 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:13.888239 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:14.388445 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:14.889102 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:15.388533 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:15.887383 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:16.388426 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:16.888022 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:17.388399 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:17.887327 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:18.388016 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:18.887470 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:19.388533 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:19.889124 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:20.387631 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:20.888484 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:21.388124 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:21.887946 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:22.388268 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:22.887332 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:23.388224 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:23.887844 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:24.387744 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:24.888405 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:25.388231 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:25.888672 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:26.388063 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:26.888126 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:27.387865 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:27.887552 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:28.387806 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:28.887772 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:29.388587 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:29.888551 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:30.387903 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:30.887507 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:31.388609 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:31.888449 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:32.388259 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:32.887834 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:33.388141 2064308 kapi.go:96] waiting for pod "kubernetes.io/minikube-addons=gcp-auth", current state: Pending: [<nil>]
	I0916 10:33:33.888934 2064308 kapi.go:107] duration metric: took 2m30.504634261s to wait for kubernetes.io/minikube-addons=gcp-auth ...
	I0916 10:33:33.890859 2064308 out.go:177] * Your GCP credentials will now be mounted into every pod created in the addons-451841 cluster.
	I0916 10:33:33.892432 2064308 out.go:177] * If you don't want your credentials mounted into a specific pod, add a label with the `gcp-auth-skip-secret` key to your pod configuration.
	I0916 10:33:33.893920 2064308 out.go:177] * If you want existing pods to be mounted with credentials, either recreate them or rerun addons enable with --refresh.
	I0916 10:33:33.895584 2064308 out.go:177] * Enabled addons: nvidia-device-plugin, storage-provisioner, ingress-dns, storage-provisioner-rancher, volcano, cloud-spanner, metrics-server, inspektor-gadget, yakd, default-storageclass, volumesnapshots, registry, csi-hostpath-driver, ingress, gcp-auth
	I0916 10:33:33.897279 2064308 addons.go:510] duration metric: took 2m43.689318504s for enable addons: enabled=[nvidia-device-plugin storage-provisioner ingress-dns storage-provisioner-rancher volcano cloud-spanner metrics-server inspektor-gadget yakd default-storageclass volumesnapshots registry csi-hostpath-driver ingress gcp-auth]
	I0916 10:33:33.897342 2064308 start.go:246] waiting for cluster config update ...
	I0916 10:33:33.897367 2064308 start.go:255] writing updated cluster config ...
	I0916 10:33:33.898186 2064308 ssh_runner.go:195] Run: rm -f paused
	I0916 10:33:33.906793 2064308 out.go:177] * Done! kubectl is now configured to use "addons-451841" cluster and "default" namespace by default
	E0916 10:33:33.908425 2064308 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                                     ATTEMPT             POD ID              POD
	89b6e8000a225       4f725bf50aaa5       2 minutes ago       Exited              gadget                                   7                   11fc6773b3e1a       gadget-wjwc2
	f90c8869604c5       6ef582f3ec844       11 minutes ago      Running             gcp-auth                                 0                   9038b6b53facd       gcp-auth-89d5ffd79-pw58v
	6f68aecec6aa2       8b46b1cd48760       12 minutes ago      Running             admission                                0                   e3af2951f3794       volcano-admission-77d7d48b68-sjxcs
	dd63136d8d6ac       289a818c8d9c5       12 minutes ago      Running             controller                               0                   b4699f942aa64       ingress-nginx-controller-bc57996ff-rqhcp
	a490639f0e8aa       ee6d597e62dc8       12 minutes ago      Running             csi-snapshotter                          0                   8260f57befdda       csi-hostpathplugin-r28vj
	317ce7462f733       642ded511e141       12 minutes ago      Running             csi-provisioner                          0                   8260f57befdda       csi-hostpathplugin-r28vj
	324d1501e94dc       420193b27261a       12 minutes ago      Exited              patch                                    2                   13d0568de4b9d       ingress-nginx-admission-patch-z2qc7
	7b9cc7b5195ab       922312104da8a       12 minutes ago      Running             liveness-probe                           0                   8260f57befdda       csi-hostpathplugin-r28vj
	bee97d004dc4d       08f6b2990811a       12 minutes ago      Running             hostpath                                 0                   8260f57befdda       csi-hostpathplugin-r28vj
	19ec6d5fbc0fd       8b46b1cd48760       13 minutes ago      Exited              main                                     0                   009b63594d8b2       volcano-admission-init-bz266
	000463bf50714       d9c7ad4c226bf       13 minutes ago      Running             volcano-scheduler                        0                   d9214c0e709d4       volcano-scheduler-576bc46687-xwjbn
	4dc42ec686d73       1505f556b3a7b       13 minutes ago      Running             volcano-controllers                      0                   343744c6dcf07       volcano-controllers-56675bb4d5-2ltwp
	30f0f6b13e6d5       420193b27261a       13 minutes ago      Exited              create                                   0                   993d0544f3868       ingress-nginx-admission-create-4vr4g
	32df8554c702e       4d1e5c3e97420       13 minutes ago      Running             volume-snapshot-controller               0                   864cff1eb40c5       snapshot-controller-56fcc65765-6llf9
	f28ea158892d3       5548a49bb60ba       13 minutes ago      Running             metrics-server                           0                   f49e908ac9969       metrics-server-84c5f94fbc-q47pm
	7558a63005c7b       0107d56dbc0be       13 minutes ago      Running             node-driver-registrar                    0                   8260f57befdda       csi-hostpathplugin-r28vj
	d181a00ffae8d       4d1e5c3e97420       13 minutes ago      Running             volume-snapshot-controller               0                   5f5c44341cf11       snapshot-controller-56fcc65765-qxvll
	6b271689ecd4e       7ce2150c8929b       13 minutes ago      Running             local-path-provisioner                   0                   de8add92893e8       local-path-provisioner-86d989889c-qkpm6
	98b48c685a09e       487fa743e1e22       13 minutes ago      Running             csi-resizer                              0                   89cb8ade3231b       csi-hostpath-resizer-0
	2472144c5bc6d       1461903ec4fe9       13 minutes ago      Running             csi-external-health-monitor-controller   0                   8260f57befdda       csi-hostpathplugin-r28vj
	0af6491cd95ee       9a80d518f102c       13 minutes ago      Running             csi-attacher                             0                   0d39436266817       csi-hostpath-attacher-0
	9b811a5c5e80c       35508c2f890c4       13 minutes ago      Running             minikube-ingress-dns                     0                   e1ed027bac8d8       kube-ingress-dns-minikube
	5232ad6b096cb       2f6c962e7b831       13 minutes ago      Running             coredns                                  0                   3ad39eb105298       coredns-7c65d6cfc9-jqthn
	4ddb5fa614111       ba04bb24b9575       13 minutes ago      Running             storage-provisioner                      0                   d0cffc65c18c1       storage-provisioner
	64b671b165f6f       6a23fa8fd2b78       13 minutes ago      Running             kindnet-cni                              0                   bd9ef3e1818e4       kindnet-zckxr
	35987f39fe9ef       24a140c548c07       13 minutes ago      Running             kube-proxy                               0                   6a8ebbdde94be       kube-proxy-tltkn
	8769c148a0bb3       27e3830e14027       14 minutes ago      Running             etcd                                     0                   290d52892953c       etcd-addons-451841
	31da3c8e5867c       279f381cb3736       14 minutes ago      Running             kube-controller-manager                  0                   349d5195292e8       kube-controller-manager-addons-451841
	808425f96a229       7f8aa378bb47d       14 minutes ago      Running             kube-scheduler                           0                   50415da17c7f0       kube-scheduler-addons-451841
	2870b9699fd97       d3f53a98c0a9d       14 minutes ago      Running             kube-apiserver                           0                   1d8868dd2cf0d       kube-apiserver-addons-451841
	
	
	==> containerd <==
	Sep 16 10:39:45 addons-451841 containerd[816]: time="2024-09-16T10:39:45.805983765Z" level=info msg="StopPodSandbox for \"14a43ef4433bbf8290362b45be82767f94279b62e77feb2dd404614c282c30a3\""
	Sep 16 10:39:45 addons-451841 containerd[816]: time="2024-09-16T10:39:45.813610307Z" level=info msg="TearDown network for sandbox \"14a43ef4433bbf8290362b45be82767f94279b62e77feb2dd404614c282c30a3\" successfully"
	Sep 16 10:39:45 addons-451841 containerd[816]: time="2024-09-16T10:39:45.813646491Z" level=info msg="StopPodSandbox for \"14a43ef4433bbf8290362b45be82767f94279b62e77feb2dd404614c282c30a3\" returns successfully"
	Sep 16 10:39:45 addons-451841 containerd[816]: time="2024-09-16T10:39:45.814193072Z" level=info msg="RemovePodSandbox for \"14a43ef4433bbf8290362b45be82767f94279b62e77feb2dd404614c282c30a3\""
	Sep 16 10:39:45 addons-451841 containerd[816]: time="2024-09-16T10:39:45.814230849Z" level=info msg="Forcibly stopping sandbox \"14a43ef4433bbf8290362b45be82767f94279b62e77feb2dd404614c282c30a3\""
	Sep 16 10:39:45 addons-451841 containerd[816]: time="2024-09-16T10:39:45.821697064Z" level=info msg="TearDown network for sandbox \"14a43ef4433bbf8290362b45be82767f94279b62e77feb2dd404614c282c30a3\" successfully"
	Sep 16 10:39:45 addons-451841 containerd[816]: time="2024-09-16T10:39:45.828612202Z" level=warning msg="Failed to get podSandbox status for container event for sandboxID \"14a43ef4433bbf8290362b45be82767f94279b62e77feb2dd404614c282c30a3\": an error occurred when try to find sandbox: not found. Sending the event with nil podSandboxStatus."
	Sep 16 10:39:45 addons-451841 containerd[816]: time="2024-09-16T10:39:45.828865189Z" level=info msg="RemovePodSandbox \"14a43ef4433bbf8290362b45be82767f94279b62e77feb2dd404614c282c30a3\" returns successfully"
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.524320026Z" level=info msg="PullImage \"ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\""
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.648373925Z" level=info msg="ImageUpdate event name:\"ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.650196895Z" level=info msg="stop pulling image ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec: active requests=0, bytes read=89"
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.653941954Z" level=info msg="Pulled image \"ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\" with image id \"sha256:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd\", repo tag \"\", repo digest \"ghcr.io/inspektor-gadget/inspektor-gadget@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\", size \"72524105\" in 129.568062ms"
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.653989757Z" level=info msg="PullImage \"ghcr.io/inspektor-gadget/inspektor-gadget:v0.32.0@sha256:03e677e1cf9d2c9bea454e3dbcbcef20b3022e987534a2874eb1abc5bc3e73ec\" returns image reference \"sha256:4f725bf50aaa5c697fbb84c107e9c7a3766f0f85f514ffce712d03ee5f62e8dd\""
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.656693206Z" level=info msg="CreateContainer within sandbox \"11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257\" for container &ContainerMetadata{Name:gadget,Attempt:7,}"
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.677098875Z" level=info msg="CreateContainer within sandbox \"11fc6773b3e1a8e08c3b05ab1b4eefd20fbdfdc9ecb202ef1614e55889b26257\" for &ContainerMetadata{Name:gadget,Attempt:7,} returns container id \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\""
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.677938254Z" level=info msg="StartContainer for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\""
	Sep 16 10:42:28 addons-451841 containerd[816]: time="2024-09-16T10:42:28.730010072Z" level=info msg="StartContainer for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\" returns successfully"
	Sep 16 10:42:30 addons-451841 containerd[816]: time="2024-09-16T10:42:30.138310098Z" level=error msg="ExecSync for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\" failed" error="failed to exec in container: failed to start exec \"18945d8e8329249f45b429c8d2c629c681f79f2690cb164b744d16d6edaa41f1\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Sep 16 10:42:30 addons-451841 containerd[816]: time="2024-09-16T10:42:30.156036547Z" level=error msg="ExecSync for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\" failed" error="failed to exec in container: failed to start exec \"24fda1abd5567e07b5878128ccdadf5a45b72b364dd529766ac153034519db72\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Sep 16 10:42:30 addons-451841 containerd[816]: time="2024-09-16T10:42:30.170223237Z" level=error msg="ExecSync for \"89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df\" failed" error="failed to exec in container: failed to start exec \"b427372ae9babdb028a4500d5cfa0aa496cec7f5f42145c1a1f6f062b2dc9c0c\": OCI runtime exec failed: exec failed: cannot exec in a stopped container: unknown"
	Sep 16 10:42:30 addons-451841 containerd[816]: time="2024-09-16T10:42:30.324237655Z" level=info msg="shim disconnected" id=89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df namespace=k8s.io
	Sep 16 10:42:30 addons-451841 containerd[816]: time="2024-09-16T10:42:30.324320912Z" level=warning msg="cleaning up after shim disconnected" id=89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df namespace=k8s.io
	Sep 16 10:42:30 addons-451841 containerd[816]: time="2024-09-16T10:42:30.324332883Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:42:31 addons-451841 containerd[816]: time="2024-09-16T10:42:31.028529096Z" level=info msg="RemoveContainer for \"4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c\""
	Sep 16 10:42:31 addons-451841 containerd[816]: time="2024-09-16T10:42:31.035424189Z" level=info msg="RemoveContainer for \"4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c\" returns successfully"
	
	
	==> coredns [5232ad6b096cb39cf18a9c11e936d3dae11b081bd6666741f3c42e78161ed09f] <==
	[INFO] 10.244.0.9:45725 - 37874 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000066576s
	[INFO] 10.244.0.9:59523 - 16440 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002202223s
	[INFO] 10.244.0.9:59523 - 22330 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.002459412s
	[INFO] 10.244.0.9:50469 - 36811 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000147232s
	[INFO] 10.244.0.9:50469 - 6599 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000154395s
	[INFO] 10.244.0.9:54670 - 20364 "AAAA IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000113739s
	[INFO] 10.244.0.9:54670 - 51376 "A IN registry.kube-system.svc.cluster.local.kube-system.svc.cluster.local. udp 86 false 512" NXDOMAIN qr,aa,rd 179 0.000392596s
	[INFO] 10.244.0.9:37135 - 16205 "A IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.000064189s
	[INFO] 10.244.0.9:37135 - 64832 "AAAA IN registry.kube-system.svc.cluster.local.svc.cluster.local. udp 74 false 512" NXDOMAIN qr,aa,rd 167 0.00005714s
	[INFO] 10.244.0.9:54223 - 7962 "A IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000052168s
	[INFO] 10.244.0.9:54223 - 14360 "AAAA IN registry.kube-system.svc.cluster.local.cluster.local. udp 70 false 512" NXDOMAIN qr,aa,rd 163 0.000048632s
	[INFO] 10.244.0.9:33840 - 38805 "A IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.001404552s
	[INFO] 10.244.0.9:33840 - 4752 "AAAA IN registry.kube-system.svc.cluster.local.us-east-2.compute.internal. udp 83 false 512" NXDOMAIN qr,rd,ra 83 0.00164702s
	[INFO] 10.244.0.9:45027 - 58736 "AAAA IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 149 0.000075766s
	[INFO] 10.244.0.9:45027 - 39026 "A IN registry.kube-system.svc.cluster.local. udp 56 false 512" NOERROR qr,aa,rd 110 0.000213093s
	[INFO] 10.244.0.24:51483 - 10090 "AAAA IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.000202231s
	[INFO] 10.244.0.24:42195 - 64926 "A IN storage.googleapis.com.gcp-auth.svc.cluster.local. udp 78 false 1232" NXDOMAIN qr,aa,rd 160 0.000154649s
	[INFO] 10.244.0.24:32892 - 59527 "AAAA IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000163215s
	[INFO] 10.244.0.24:47611 - 11902 "A IN storage.googleapis.com.svc.cluster.local. udp 69 false 1232" NXDOMAIN qr,aa,rd 151 0.000096418s
	[INFO] 10.244.0.24:59950 - 37722 "AAAA IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.00008466s
	[INFO] 10.244.0.24:52002 - 29131 "A IN storage.googleapis.com.cluster.local. udp 65 false 1232" NXDOMAIN qr,aa,rd 147 0.000089403s
	[INFO] 10.244.0.24:38598 - 65011 "AAAA IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.002266241s
	[INFO] 10.244.0.24:60458 - 11928 "A IN storage.googleapis.com.us-east-2.compute.internal. udp 78 false 1232" NXDOMAIN qr,rd,ra 67 0.001809759s
	[INFO] 10.244.0.24:43975 - 30277 "A IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 610 0.001862182s
	[INFO] 10.244.0.24:51154 - 58482 "AAAA IN storage.googleapis.com. udp 51 false 1232" NOERROR qr,rd,ra 240 0.002138832s
	
	
	==> describe nodes <==
	Name:               addons-451841
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=addons-451841
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=addons-451841
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_30_46_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	                    topology.hostpath.csi/node=addons-451841
	Annotations:        csi.volume.kubernetes.io/nodeid: {"hostpath.csi.k8s.io":"addons-451841"}
	                    kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:30:42 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  addons-451841
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:44:41 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:44:22 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:44:22 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:44:22 +0000   Mon, 16 Sep 2024 10:30:39 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:44:22 +0000   Mon, 16 Sep 2024 10:30:42 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    addons-451841
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 003b87cb77e5465aa882d8df5f5cd5ab
	  System UUID:                21a29522-aef6-4d70-a29b-0ea27731fdbe
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (22 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  gadget                      gadget-wjwc2                                0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  gcp-auth                    gcp-auth-89d5ffd79-pw58v                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         12m
	  ingress-nginx               ingress-nginx-controller-bc57996ff-rqhcp    100m (5%)     0 (0%)      90Mi (1%)        0 (0%)         13m
	  kube-system                 coredns-7c65d6cfc9-jqthn                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     13m
	  kube-system                 csi-hostpath-attacher-0                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  kube-system                 csi-hostpath-resizer-0                      0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  kube-system                 csi-hostpathplugin-r28vj                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  kube-system                 etcd-addons-451841                          100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         13m
	  kube-system                 kindnet-zckxr                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      13m
	  kube-system                 kube-apiserver-addons-451841                250m (12%)    0 (0%)      0 (0%)           0 (0%)         13m
	  kube-system                 kube-controller-manager-addons-451841       200m (10%)    0 (0%)      0 (0%)           0 (0%)         14m
	  kube-system                 kube-ingress-dns-minikube                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  kube-system                 kube-proxy-tltkn                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  kube-system                 kube-scheduler-addons-451841                100m (5%)     0 (0%)      0 (0%)           0 (0%)         13m
	  kube-system                 metrics-server-84c5f94fbc-q47pm             100m (5%)     0 (0%)      200Mi (2%)       0 (0%)         13m
	  kube-system                 snapshot-controller-56fcc65765-6llf9        0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  kube-system                 snapshot-controller-56fcc65765-qxvll        0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  local-path-storage          local-path-provisioner-86d989889c-qkpm6     0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  volcano-system              volcano-admission-77d7d48b68-sjxcs          0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  volcano-system              volcano-controllers-56675bb4d5-2ltwp        0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  volcano-system              volcano-scheduler-576bc46687-xwjbn          0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                1050m (52%)  100m (5%)
	  memory             510Mi (6%)   220Mi (2%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-1Gi      0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	  hugepages-32Mi     0 (0%)       0 (0%)
	  hugepages-64Ki     0 (0%)       0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   Starting                 13m                kube-proxy       
	  Normal   NodeAllocatableEnforced  14m                kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 14m                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  14m (x8 over 14m)  kubelet          Node addons-451841 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    14m (x7 over 14m)  kubelet          Node addons-451841 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     14m (x7 over 14m)  kubelet          Node addons-451841 status is now: NodeHasSufficientPID
	  Normal   Starting                 14m                kubelet          Starting kubelet.
	  Normal   Starting                 13m                kubelet          Starting kubelet.
	  Warning  CgroupV1                 13m                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  13m                kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  13m                kubelet          Node addons-451841 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    13m                kubelet          Node addons-451841 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     13m                kubelet          Node addons-451841 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           13m                node-controller  Node addons-451841 event: Registered Node addons-451841 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [8769c148a0bb341cc1dcca117d41b6be795d52ed6e49348d14da26aac1d42f01] <==
	{"level":"info","ts":"2024-09-16T10:30:38.620740Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:30:38.620758Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:30:39.574728Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.574952Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.575045Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 1"}
	{"level":"info","ts":"2024-09-16T10:30:39.575117Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575193Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575240Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.575326Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:30:39.581756Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:addons-451841 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:30:39.582006Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:30:39.582134Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:30:39.582418Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:30:39.582525Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:30:39.582434Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.583536Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:30:39.584617Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:30:39.585041Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.590779Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.590980Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:30:39.591021Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:30:39.592081Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:40:40.037545Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":1731}
	{"level":"info","ts":"2024-09-16T10:40:40.103709Z","caller":"mvcc/kvstore_compaction.go:69","msg":"finished scheduled compaction","compact-revision":1731,"took":"65.214596ms","hash":4239490720,"current-db-size-bytes":9076736,"current-db-size":"9.1 MB","current-db-size-in-use-bytes":5177344,"current-db-size-in-use":"5.2 MB"}
	{"level":"info","ts":"2024-09-16T10:40:40.103850Z","caller":"mvcc/hash.go:137","msg":"storing new hash","hash":4239490720,"revision":1731,"compact-revision":-1}
	
	
	==> gcp-auth [f90c8869604c54edfd93d5ef8e6467ed81e6a63fbedf9c5712f155d5d85f40b8] <==
	2024/09/16 10:33:32 GCP Auth Webhook started!
	2024/09/16 10:38:59 Ready to marshal response ...
	2024/09/16 10:38:59 Ready to write response ...
	2024/09/16 10:38:59 Ready to marshal response ...
	2024/09/16 10:38:59 Ready to write response ...
	2024/09/16 10:38:59 Ready to marshal response ...
	2024/09/16 10:38:59 Ready to write response ...
	
	
	==> kernel <==
	 10:44:44 up 1 day, 14:27,  0 users,  load average: 0.56, 0.44, 1.23
	Linux addons-451841 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [64b671b165f6f7bb28b281ddd3fe708221407f35f09389c964253f52887fd626] <==
	I0916 10:42:41.726413       1 main.go:299] handling current node
	I0916 10:42:51.721369       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:42:51.721412       1 main.go:299] handling current node
	I0916 10:43:01.721636       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:01.721670       1 main.go:299] handling current node
	I0916 10:43:11.730806       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:11.730843       1 main.go:299] handling current node
	I0916 10:43:21.724456       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:21.724489       1 main.go:299] handling current node
	I0916 10:43:31.724543       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:31.724584       1 main.go:299] handling current node
	I0916 10:43:41.727823       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:41.727859       1 main.go:299] handling current node
	I0916 10:43:51.720900       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:43:51.720937       1 main.go:299] handling current node
	I0916 10:44:01.728967       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:01.729002       1 main.go:299] handling current node
	I0916 10:44:11.726855       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:11.726894       1 main.go:299] handling current node
	I0916 10:44:21.723656       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:21.723690       1 main.go:299] handling current node
	I0916 10:44:31.726799       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:31.726835       1 main.go:299] handling current node
	I0916 10:44:41.721111       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:44:41.721149       1 main.go:299] handling current node
	
	
	==> kube-apiserver [2870b9699fd97d290c5750a6361bd1eb6ac986ce8fb7e3f9eb6474155c6b1fa8] <==
	W0916 10:32:02.466465       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:03.481058       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:04.547641       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:05.602664       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.287206       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:06.287255       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:32:06.288974       1 dispatcher.go:225] Failed calling webhook, failing closed mutatepod.volcano.sh: failed calling webhook "mutatepod.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/pods/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.354223       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:06.354265       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:32:06.355906       1 dispatcher.go:225] Failed calling webhook, failing closed mutatepod.volcano.sh: failed calling webhook "mutatepod.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/pods/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:06.623074       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:07.661520       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:08.760490       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:09.841622       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:10.917089       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:11.983956       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:13.046405       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:14.089526       1 dispatcher.go:225] Failed calling webhook, failing closed mutatequeue.volcano.sh: failed calling webhook "mutatequeue.volcano.sh": failed to call webhook: Post "https://volcano-admission-service.volcano-system.svc:443/queues/mutate?timeout=10s": dial tcp 10.104.39.137:443: connect: connection refused
	W0916 10:32:25.289607       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:32:25.289652       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:33:06.298609       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:33:06.298665       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	W0916 10:33:06.363205       1 dispatcher.go:210] Failed calling webhook, failing open gcp-auth-mutate.k8s.io: failed calling webhook "gcp-auth-mutate.k8s.io": failed to call webhook: Post "https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s": dial tcp 10.106.12.131:443: connect: connection refused
	E0916 10:33:06.363253       1 dispatcher.go:214] "Unhandled Error" err="failed calling webhook \"gcp-auth-mutate.k8s.io\": failed to call webhook: Post \"https://gcp-auth.gcp-auth.svc:443/mutate?timeout=10s\": dial tcp 10.106.12.131:443: connect: connection refused" logger="UnhandledError"
	I0916 10:38:59.795161       1 alloc.go:330] "allocated clusterIPs" service="headlamp/headlamp" clusterIPs={"IPv4":"10.111.56.3"}
	
	
	==> kube-controller-manager [31da3c8e5867c3e2a6f4592fba3d201359a6c0c862a2620157496149c91a3b11] <==
	I0916 10:33:09.569582       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:09.577256       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="1s"
	I0916 10:33:33.504028       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="13.523761ms"
	I0916 10:33:33.504493       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="gcp-auth/gcp-auth-89d5ffd79" duration="52.742µs"
	I0916 10:33:39.026367       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:33:39.035647       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:33:39.082659       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-patch" delay="0s"
	I0916 10:33:39.087612       1 job_controller.go:568] "enqueueing job" logger="job-controller" key="gcp-auth/gcp-auth-certs-create" delay="0s"
	I0916 10:33:49.934314       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-451841"
	I0916 10:38:37.530598       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="yakd-dashboard/yakd-dashboard-67d98fc6b" duration="21.727µs"
	I0916 10:38:43.718435       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/registry-66c9cd494c" duration="10.191µs"
	I0916 10:38:47.665807       1 namespace_controller.go:187] "Namespace has been deleted" logger="namespace-controller" namespace="yakd-dashboard"
	I0916 10:38:56.054829       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-451841"
	I0916 10:38:58.694110       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/cloud-spanner-emulator-769b77f747" duration="7.114µs"
	I0916 10:38:59.911454       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="86.11631ms"
	I0916 10:38:59.926177       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="14.113318ms"
	I0916 10:38:59.926285       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="67.922µs"
	I0916 10:38:59.926662       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="38.744µs"
	I0916 10:39:03.454274       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="54.203µs"
	I0916 10:39:03.484504       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="10.723572ms"
	I0916 10:39:03.484593       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="51.142µs"
	I0916 10:39:10.433645       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="headlamp/headlamp-57fb76fcdb" duration="6.515µs"
	I0916 10:39:16.650393       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-451841"
	I0916 10:39:20.588127       1 namespace_controller.go:187] "Namespace has been deleted" logger="namespace-controller" namespace="headlamp"
	I0916 10:44:22.473587       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="addons-451841"
	
	
	==> kube-proxy [35987f39fe9efffcbcdfe8a1694d2541bd561939f35f2770e06a09f005dcf753] <==
	I0916 10:30:51.148935       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:30:51.266541       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:30:51.266602       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:30:51.307434       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:30:51.307506       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:30:51.310004       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:30:51.310401       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:30:51.310420       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:30:51.312344       1 config.go:199] "Starting service config controller"
	I0916 10:30:51.312371       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:30:51.312398       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:30:51.312403       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:30:51.315085       1 config.go:328] "Starting node config controller"
	I0916 10:30:51.315100       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:30:51.413119       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:30:51.413177       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:30:51.415238       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [808425f96a2291f8e0cf3dfea11339a46bc25f8b4e1f82c29efc8eee8e1d729a] <==
	W0916 10:30:43.815016       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:30:43.815095       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.815504       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.815602       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.815794       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 10:30:43.815882       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.816048       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:30:43.816126       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.816295       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.817022       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817307       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:30:43.817404       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817601       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 10:30:43.817688       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817801       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:30:43.818028       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.817989       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:30:43.818395       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.818315       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 10:30:43.818847       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.818381       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 10:30:43.819065       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:30:43.819318       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:30:43.819478       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	I0916 10:30:44.999991       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 10:42:31 addons-451841 kubelet[1517]: I0916 10:42:31.023608    1517 scope.go:117] "RemoveContainer" containerID="4bf6f7c832fdd7882f49122b20beed6f7caf3b9e36717efd68feb70fefe1445c"
	Sep 16 10:42:31 addons-451841 kubelet[1517]: I0916 10:42:31.024039    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:42:31 addons-451841 kubelet[1517]: E0916 10:42:31.024217    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:42:32 addons-451841 kubelet[1517]: I0916 10:42:32.112127    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:42:32 addons-451841 kubelet[1517]: E0916 10:42:32.115228    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:42:46 addons-451841 kubelet[1517]: I0916 10:42:46.522949    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:42:46 addons-451841 kubelet[1517]: E0916 10:42:46.523143    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:42:58 addons-451841 kubelet[1517]: I0916 10:42:58.522452    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:42:58 addons-451841 kubelet[1517]: E0916 10:42:58.522667    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:43:12 addons-451841 kubelet[1517]: I0916 10:43:12.522783    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:43:12 addons-451841 kubelet[1517]: E0916 10:43:12.523003    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:43:24 addons-451841 kubelet[1517]: I0916 10:43:24.521816    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:43:24 addons-451841 kubelet[1517]: E0916 10:43:24.522027    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:43:37 addons-451841 kubelet[1517]: I0916 10:43:37.522566    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:43:37 addons-451841 kubelet[1517]: E0916 10:43:37.522820    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:43:52 addons-451841 kubelet[1517]: I0916 10:43:52.522850    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:43:52 addons-451841 kubelet[1517]: E0916 10:43:52.523060    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:44:04 addons-451841 kubelet[1517]: I0916 10:44:04.523350    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:44:04 addons-451841 kubelet[1517]: E0916 10:44:04.523963    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:44:15 addons-451841 kubelet[1517]: I0916 10:44:15.523364    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:44:15 addons-451841 kubelet[1517]: E0916 10:44:15.524085    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:44:29 addons-451841 kubelet[1517]: I0916 10:44:29.522556    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:44:29 addons-451841 kubelet[1517]: E0916 10:44:29.523298    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	Sep 16 10:44:44 addons-451841 kubelet[1517]: I0916 10:44:44.522990    1517 scope.go:117] "RemoveContainer" containerID="89b6e8000a2252f6243af9701f5eef8ce6b6aae2435858a8ac38950fbd6d31df"
	Sep 16 10:44:44 addons-451841 kubelet[1517]: E0916 10:44:44.525296    1517 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gadget\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gadget pod=gadget-wjwc2_gadget(6a752659-ec3c-4841-8e83-fd916caaebc2)\"" pod="gadget/gadget-wjwc2" podUID="6a752659-ec3c-4841-8e83-fd916caaebc2"
	
	
	==> storage-provisioner [4ddb5fa614111a21d93d580947f3eb3b791d38fa6e497e66ae259ff6bb7fed15] <==
	I0916 10:30:56.265937       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:30:56.289948       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:30:56.290011       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:30:56.319402       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:30:56.319890       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"ef239dcc-ec3a-4a4d-b0db-6d9c8de888a1", APIVersion:"v1", ResourceVersion:"601", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157 became leader
	I0916 10:30:56.319948       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157!
	I0916 10:30:56.520389       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_addons-451841_dc46b00a-d366-4b2e-97e6-9793fbb0c157!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p addons-451841 -n addons-451841
helpers_test.go:261: (dbg) Run:  kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (807.986µs)
helpers_test.go:263: kubectl --context addons-451841 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestAddons/parallel/CSI (362.50s)

                                                
                                    
x
+
TestAddons/parallel/LocalPath (0s)

                                                
                                                
=== RUN   TestAddons/parallel/LocalPath
=== PAUSE TestAddons/parallel/LocalPath

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/LocalPath
addons_test.go:982: (dbg) Run:  kubectl --context addons-451841 apply -f testdata/storage-provisioner-rancher/pvc.yaml
addons_test.go:982: (dbg) Non-zero exit: kubectl --context addons-451841 apply -f testdata/storage-provisioner-rancher/pvc.yaml: fork/exec /usr/local/bin/kubectl: exec format error (604.82µs)
addons_test.go:984: kubectl apply pvc.yaml failed: args "kubectl --context addons-451841 apply -f testdata/storage-provisioner-rancher/pvc.yaml": fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestAddons/parallel/LocalPath (0.00s)

                                                
                                    
x
+
TestCertOptions (41.75s)

                                                
                                                
=== RUN   TestCertOptions
=== PAUSE TestCertOptions

                                                
                                                

                                                
                                                
=== CONT  TestCertOptions
cert_options_test.go:49: (dbg) Run:  out/minikube-linux-arm64 start -p cert-options-844117 --memory=2048 --apiserver-ips=127.0.0.1 --apiserver-ips=192.168.15.15 --apiserver-names=localhost --apiserver-names=www.google.com --apiserver-port=8555 --driver=docker  --container-runtime=containerd
E0916 11:24:07.674759 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
cert_options_test.go:49: (dbg) Done: out/minikube-linux-arm64 start -p cert-options-844117 --memory=2048 --apiserver-ips=127.0.0.1 --apiserver-ips=192.168.15.15 --apiserver-names=localhost --apiserver-names=www.google.com --apiserver-port=8555 --driver=docker  --container-runtime=containerd: (36.746479783s)
cert_options_test.go:60: (dbg) Run:  out/minikube-linux-arm64 -p cert-options-844117 ssh "openssl x509 -text -noout -in /var/lib/minikube/certs/apiserver.crt"
cert_options_test.go:88: (dbg) Run:  kubectl --context cert-options-844117 config view
cert_options_test.go:88: (dbg) Non-zero exit: kubectl --context cert-options-844117 config view: fork/exec /usr/local/bin/kubectl: exec format error (390.348µs)
cert_options_test.go:90: failed to get kubectl config. args "kubectl --context cert-options-844117 config view" : fork/exec /usr/local/bin/kubectl: exec format error
cert_options_test.go:93: Kubeconfig apiserver server port incorrect. Output of 
'kubectl config view' = ""
cert_options_test.go:100: (dbg) Run:  out/minikube-linux-arm64 ssh -p cert-options-844117 -- "sudo cat /etc/kubernetes/admin.conf"
cert_options_test.go:109: *** TestCertOptions FAILED at 2024-09-16 11:24:38.474619427 +0000 UTC m=+3350.116019584
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestCertOptions]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect cert-options-844117
helpers_test.go:235: (dbg) docker inspect cert-options-844117:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "815eca0e5d589c12d588aad592b8b08575e6b7d0a1761d59e787ded2a4381353",
	        "Created": "2024-09-16T11:24:07.322049232Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2257161,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:24:07.51982622Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/815eca0e5d589c12d588aad592b8b08575e6b7d0a1761d59e787ded2a4381353/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/815eca0e5d589c12d588aad592b8b08575e6b7d0a1761d59e787ded2a4381353/hostname",
	        "HostsPath": "/var/lib/docker/containers/815eca0e5d589c12d588aad592b8b08575e6b7d0a1761d59e787ded2a4381353/hosts",
	        "LogPath": "/var/lib/docker/containers/815eca0e5d589c12d588aad592b8b08575e6b7d0a1761d59e787ded2a4381353/815eca0e5d589c12d588aad592b8b08575e6b7d0a1761d59e787ded2a4381353-json.log",
	        "Name": "/cert-options-844117",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "cert-options-844117:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "cert-options-844117",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8555/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2147483648,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4294967296,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/6a7722ede1d37c1bcd670ab0e0b3f0595af52addc4d4a0fd572da321e01a099f-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/6a7722ede1d37c1bcd670ab0e0b3f0595af52addc4d4a0fd572da321e01a099f/merged",
	                "UpperDir": "/var/lib/docker/overlay2/6a7722ede1d37c1bcd670ab0e0b3f0595af52addc4d4a0fd572da321e01a099f/diff",
	                "WorkDir": "/var/lib/docker/overlay2/6a7722ede1d37c1bcd670ab0e0b3f0595af52addc4d4a0fd572da321e01a099f/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "cert-options-844117",
	                "Source": "/var/lib/docker/volumes/cert-options-844117/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "cert-options-844117",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8555/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "cert-options-844117",
	                "name.minikube.sigs.k8s.io": "cert-options-844117",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "b5ddc930c98644df1e6c4faf6af44712d7b6ee26c6f84c114979a64ca6c2a70d",
	            "SandboxKey": "/var/run/docker/netns/b5ddc930c986",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40852"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40853"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40856"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40854"
	                    }
	                ],
	                "8555/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40855"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "cert-options-844117": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.103.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:67:02",
	                    "DriverOpts": null,
	                    "NetworkID": "651591b343801702a257c27c8bcbbafe57edc12230cdd271cf189c6a7f1501e5",
	                    "EndpointID": "7753e01086194b3bf3dae15e9b4ebc42f3bdb24322b96db05a8988d74e251d7c",
	                    "Gateway": "192.168.103.1",
	                    "IPAddress": "192.168.103.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "cert-options-844117",
	                        "815eca0e5d58"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p cert-options-844117 -n cert-options-844117
helpers_test.go:244: <<< TestCertOptions FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestCertOptions]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p cert-options-844117 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p cert-options-844117 logs -n 25: (1.295607709s)
helpers_test.go:252: TestCertOptions logs: 
-- stdout --
	
	==> Audit <==
	|---------|------------------------------------------------------|---------------------------|---------|---------|---------------------|---------------------|
	| Command |                         Args                         |          Profile          |  User   | Version |     Start Time      |      End Time       |
	|---------|------------------------------------------------------|---------------------------|---------|---------|---------------------|---------------------|
	| ssh     | -p cilium-430967 sudo cat                            | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /etc/docker/daemon.json                              |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo docker                         | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | system info                                          |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl status cri-docker                          |                           |         |         |                     |                     |
	|         | --all --full --no-pager                              |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl cat cri-docker                             |                           |         |         |                     |                     |
	|         | --no-pager                                           |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo cat                            | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /etc/systemd/system/cri-docker.service.d/10-cni.conf |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo cat                            | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /usr/lib/systemd/system/cri-docker.service           |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | cri-dockerd --version                                |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl status containerd                          |                           |         |         |                     |                     |
	|         | --all --full --no-pager                              |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl cat containerd                             |                           |         |         |                     |                     |
	|         | --no-pager                                           |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo cat                            | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /lib/systemd/system/containerd.service               |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo cat                            | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /etc/containerd/config.toml                          |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | containerd config dump                               |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl status crio --all                          |                           |         |         |                     |                     |
	|         | --full --no-pager                                    |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl cat crio --no-pager                        |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo find                           | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /etc/crio -type f -exec sh -c                        |                           |         |         |                     |                     |
	|         | 'echo {}; cat {}' \;                                 |                           |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo crio                           | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | config                                               |                           |         |         |                     |                     |
	| delete  | -p cilium-430967                                     | cilium-430967             | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC | 16 Sep 24 11:23 UTC |
	| start   | -p force-systemd-env-836951                          | force-systemd-env-836951  | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC | 16 Sep 24 11:23 UTC |
	|         | --memory=2048                                        |                           |         |         |                     |                     |
	|         | --alsologtostderr                                    |                           |         |         |                     |                     |
	|         | -v=5 --driver=docker                                 |                           |         |         |                     |                     |
	|         | --container-runtime=containerd                       |                           |         |         |                     |                     |
	| delete  | -p kubernetes-upgrade-969540                         | kubernetes-upgrade-969540 | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC | 16 Sep 24 11:23 UTC |
	| start   | -p cert-expiration-617624                            | cert-expiration-617624    | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC | 16 Sep 24 11:24 UTC |
	|         | --memory=2048                                        |                           |         |         |                     |                     |
	|         | --cert-expiration=3m                                 |                           |         |         |                     |                     |
	|         | --driver=docker                                      |                           |         |         |                     |                     |
	|         | --container-runtime=containerd                       |                           |         |         |                     |                     |
	| ssh     | force-systemd-env-836951                             | force-systemd-env-836951  | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC | 16 Sep 24 11:23 UTC |
	|         | ssh cat                                              |                           |         |         |                     |                     |
	|         | /etc/containerd/config.toml                          |                           |         |         |                     |                     |
	| delete  | -p force-systemd-env-836951                          | force-systemd-env-836951  | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC | 16 Sep 24 11:24 UTC |
	| start   | -p cert-options-844117                               | cert-options-844117       | jenkins | v1.34.0 | 16 Sep 24 11:24 UTC | 16 Sep 24 11:24 UTC |
	|         | --memory=2048                                        |                           |         |         |                     |                     |
	|         | --apiserver-ips=127.0.0.1                            |                           |         |         |                     |                     |
	|         | --apiserver-ips=192.168.15.15                        |                           |         |         |                     |                     |
	|         | --apiserver-names=localhost                          |                           |         |         |                     |                     |
	|         | --apiserver-names=www.google.com                     |                           |         |         |                     |                     |
	|         | --apiserver-port=8555                                |                           |         |         |                     |                     |
	|         | --driver=docker                                      |                           |         |         |                     |                     |
	|         | --container-runtime=containerd                       |                           |         |         |                     |                     |
	| ssh     | cert-options-844117 ssh                              | cert-options-844117       | jenkins | v1.34.0 | 16 Sep 24 11:24 UTC | 16 Sep 24 11:24 UTC |
	|         | openssl x509 -text -noout -in                        |                           |         |         |                     |                     |
	|         | /var/lib/minikube/certs/apiserver.crt                |                           |         |         |                     |                     |
	| ssh     | -p cert-options-844117 -- sudo                       | cert-options-844117       | jenkins | v1.34.0 | 16 Sep 24 11:24 UTC | 16 Sep 24 11:24 UTC |
	|         | cat /etc/kubernetes/admin.conf                       |                           |         |         |                     |                     |
	|---------|------------------------------------------------------|---------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 11:24:01
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 11:24:01.182629 2256653 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:24:01.182821 2256653 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:24:01.182826 2256653 out.go:358] Setting ErrFile to fd 2...
	I0916 11:24:01.182829 2256653 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:24:01.183114 2256653 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 11:24:01.183659 2256653 out.go:352] Setting JSON to false
	I0916 11:24:01.184719 2256653 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":140784,"bootTime":1726345058,"procs":222,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 11:24:01.184782 2256653 start.go:139] virtualization:  
	I0916 11:24:01.188298 2256653 out.go:177] * [cert-options-844117] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:24:01.190888 2256653 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:24:01.190953 2256653 notify.go:220] Checking for updates...
	I0916 11:24:01.194653 2256653 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:24:01.196926 2256653 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:24:01.198658 2256653 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 11:24:01.200336 2256653 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:24:01.202000 2256653 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 11:24:01.204154 2256653 config.go:182] Loaded profile config "cert-expiration-617624": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:24:01.204248 2256653 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:24:01.259090 2256653 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:24:01.259241 2256653 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:24:01.383367 2256653 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:54 OomKillDisable:true NGoroutines:73 SystemTime:2024-09-16 11:24:01.365797395 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:24:01.383470 2256653 docker.go:318] overlay module found
	I0916 11:24:01.385515 2256653 out.go:177] * Using the docker driver based on user configuration
	I0916 11:24:01.387154 2256653 start.go:297] selected driver: docker
	I0916 11:24:01.387162 2256653 start.go:901] validating driver "docker" against <nil>
	I0916 11:24:01.387174 2256653 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:24:01.387916 2256653 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:24:01.482140 2256653 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:54 OomKillDisable:true NGoroutines:73 SystemTime:2024-09-16 11:24:01.463683253 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:24:01.482337 2256653 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 11:24:01.482553 2256653 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0916 11:24:01.484595 2256653 out.go:177] * Using Docker driver with root privileges
	I0916 11:24:01.486329 2256653 cni.go:84] Creating CNI manager for ""
	I0916 11:24:01.486387 2256653 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 11:24:01.486394 2256653 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 11:24:01.486475 2256653 start.go:340] cluster config:
	{Name:cert-options-844117 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2048 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8555 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:cert-options-844117 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[localhost www.google.com] APIServerIPs:[127.0.
0.1 192.168.15.15] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8555 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAge
ntPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:24:01.488326 2256653 out.go:177] * Starting "cert-options-844117" primary control-plane node in "cert-options-844117" cluster
	I0916 11:24:01.490089 2256653 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 11:24:01.491939 2256653 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:24:01.493572 2256653 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:24:01.493618 2256653 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 11:24:01.493626 2256653 cache.go:56] Caching tarball of preloaded images
	I0916 11:24:01.493713 2256653 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 11:24:01.493720 2256653 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 11:24:01.493831 2256653 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/config.json ...
	I0916 11:24:01.493847 2256653 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/config.json: {Name:mk8180e6fb26161d8a64e594ec26619f7b2b1923 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:24:01.494004 2256653 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	W0916 11:24:01.531827 2256653 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:24:01.531838 2256653 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:24:01.531918 2256653 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:24:01.531935 2256653 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:24:01.531938 2256653 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:24:01.531946 2256653 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:24:01.531951 2256653 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:24:01.744503 2256653 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:24:01.744529 2256653 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:24:01.744558 2256653 start.go:360] acquireMachinesLock for cert-options-844117: {Name:mkb6227d712d7d968ca39763d246b9459fad0185 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:24:01.744672 2256653 start.go:364] duration metric: took 97.559µs to acquireMachinesLock for "cert-options-844117"
	I0916 11:24:01.744697 2256653 start.go:93] Provisioning new machine with config: &{Name:cert-options-844117 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2048 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8555 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:cert-options-844117 Namespace:default APIServerHAVIP: APISe
rverName:minikubeCA APIServerNames:[localhost www.google.com] APIServerIPs:[127.0.0.1 192.168.15.15] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8555 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuF
irmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8555 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 11:24:01.744811 2256653 start.go:125] createHost starting for "" (driver="docker")
	I0916 11:23:57.886343 2253129 out.go:235]   - Booting up control plane ...
	I0916 11:23:57.886458 2253129 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 11:23:57.886538 2253129 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 11:23:57.889047 2253129 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 11:23:57.900633 2253129 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:23:57.907040 2253129 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:23:57.907247 2253129 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 11:23:58.012681 2253129 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 11:23:58.012795 2253129 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:23:59.513400 2253129 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.500887089s
	I0916 11:23:59.513621 2253129 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 11:24:01.747042 2256653 out.go:235] * Creating docker container (CPUs=2, Memory=2048MB) ...
	I0916 11:24:01.747315 2256653 start.go:159] libmachine.API.Create for "cert-options-844117" (driver="docker")
	I0916 11:24:01.747342 2256653 client.go:168] LocalClient.Create starting
	I0916 11:24:01.747413 2256653 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 11:24:01.747448 2256653 main.go:141] libmachine: Decoding PEM data...
	I0916 11:24:01.747460 2256653 main.go:141] libmachine: Parsing certificate...
	I0916 11:24:01.747516 2256653 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 11:24:01.747532 2256653 main.go:141] libmachine: Decoding PEM data...
	I0916 11:24:01.747545 2256653 main.go:141] libmachine: Parsing certificate...
	I0916 11:24:01.747913 2256653 cli_runner.go:164] Run: docker network inspect cert-options-844117 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 11:24:01.773172 2256653 cli_runner.go:211] docker network inspect cert-options-844117 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 11:24:01.773248 2256653 network_create.go:284] running [docker network inspect cert-options-844117] to gather additional debugging logs...
	I0916 11:24:01.773267 2256653 cli_runner.go:164] Run: docker network inspect cert-options-844117
	W0916 11:24:01.809865 2256653 cli_runner.go:211] docker network inspect cert-options-844117 returned with exit code 1
	I0916 11:24:01.809892 2256653 network_create.go:287] error running [docker network inspect cert-options-844117]: docker network inspect cert-options-844117: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network cert-options-844117 not found
	I0916 11:24:01.809903 2256653 network_create.go:289] output of [docker network inspect cert-options-844117]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network cert-options-844117 not found
	
	** /stderr **
	I0916 11:24:01.810010 2256653 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:24:01.837935 2256653 network.go:211] skipping subnet 192.168.49.0/24 that is taken: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName:br-941929ec13d1 IfaceIPv4:192.168.49.1 IfaceMTU:1500 IfaceMAC:02:42:32:84:fe:19} reservation:<nil>}
	I0916 11:24:01.838396 2256653 network.go:211] skipping subnet 192.168.58.0/24 that is taken: &{IP:192.168.58.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.58.0/24 Gateway:192.168.58.1 ClientMin:192.168.58.2 ClientMax:192.168.58.254 Broadcast:192.168.58.255 IsPrivate:true Interface:{IfaceName:br-b138f637362d IfaceIPv4:192.168.58.1 IfaceMTU:1500 IfaceMAC:02:42:81:42:5c:08} reservation:<nil>}
	I0916 11:24:01.839209 2256653 network.go:211] skipping subnet 192.168.67.0/24 that is taken: &{IP:192.168.67.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.67.0/24 Gateway:192.168.67.1 ClientMin:192.168.67.2 ClientMax:192.168.67.254 Broadcast:192.168.67.255 IsPrivate:true Interface:{IfaceName:br-c7e139d3d7f3 IfaceIPv4:192.168.67.1 IfaceMTU:1500 IfaceMAC:02:42:92:83:9d:9a} reservation:<nil>}
	I0916 11:24:01.839815 2256653 network.go:211] skipping subnet 192.168.76.0/24 that is taken: &{IP:192.168.76.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.76.0/24 Gateway:192.168.76.1 ClientMin:192.168.76.2 ClientMax:192.168.76.254 Broadcast:192.168.76.255 IsPrivate:true Interface:{IfaceName:br-1db95f4d52e9 IfaceIPv4:192.168.76.1 IfaceMTU:1500 IfaceMAC:02:42:f7:1e:e5:2e} reservation:<nil>}
	I0916 11:24:01.840274 2256653 network.go:211] skipping subnet 192.168.85.0/24 that is taken: &{IP:192.168.85.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.85.0/24 Gateway:192.168.85.1 ClientMin:192.168.85.2 ClientMax:192.168.85.254 Broadcast:192.168.85.255 IsPrivate:true Interface:{IfaceName:br-82d0b324cf4f IfaceIPv4:192.168.85.1 IfaceMTU:1500 IfaceMAC:02:42:d6:d4:2a:58} reservation:<nil>}
	I0916 11:24:01.840790 2256653 network.go:211] skipping subnet 192.168.94.0/24 that is taken: &{IP:192.168.94.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.94.0/24 Gateway:192.168.94.1 ClientMin:192.168.94.2 ClientMax:192.168.94.254 Broadcast:192.168.94.255 IsPrivate:true Interface:{IfaceName:br-66822a5d117b IfaceIPv4:192.168.94.1 IfaceMTU:1500 IfaceMAC:02:42:cf:08:74:bd} reservation:<nil>}
	I0916 11:24:01.841684 2256653 network.go:206] using free private subnet 192.168.103.0/24: &{IP:192.168.103.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.103.0/24 Gateway:192.168.103.1 ClientMin:192.168.103.2 ClientMax:192.168.103.254 Broadcast:192.168.103.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001a52100}
	I0916 11:24:01.841704 2256653 network_create.go:124] attempt to create docker network cert-options-844117 192.168.103.0/24 with gateway 192.168.103.1 and MTU of 1500 ...
	I0916 11:24:01.841798 2256653 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.103.0/24 --gateway=192.168.103.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=cert-options-844117 cert-options-844117
	I0916 11:24:01.969719 2256653 network_create.go:108] docker network cert-options-844117 192.168.103.0/24 created
	I0916 11:24:01.969740 2256653 kic.go:121] calculated static IP "192.168.103.2" for the "cert-options-844117" container
	I0916 11:24:01.969825 2256653 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:24:01.998904 2256653 cli_runner.go:164] Run: docker volume create cert-options-844117 --label name.minikube.sigs.k8s.io=cert-options-844117 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:24:02.016702 2256653 oci.go:103] Successfully created a docker volume cert-options-844117
	I0916 11:24:02.016789 2256653 cli_runner.go:164] Run: docker run --rm --name cert-options-844117-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=cert-options-844117 --entrypoint /usr/bin/test -v cert-options-844117:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 11:24:02.806671 2256653 oci.go:107] Successfully prepared a docker volume cert-options-844117
	I0916 11:24:02.806720 2256653 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:24:02.806784 2256653 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 11:24:02.806856 2256653 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v cert-options-844117:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 11:24:10.018149 2253129 kubeadm.go:310] [api-check] The API server is healthy after 10.505127756s
	I0916 11:24:10.043411 2253129 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:24:10.058721 2253129 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:24:10.090594 2253129 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:24:10.090872 2253129 kubeadm.go:310] [mark-control-plane] Marking the node cert-expiration-617624 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:24:10.102808 2253129 kubeadm.go:310] [bootstrap-token] Using token: 9xnevm.672dskg38hgh2kz9
	I0916 11:24:07.206253 2256653 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v cert-options-844117:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.399361988s)
	I0916 11:24:07.206273 2256653 kic.go:203] duration metric: took 4.399486204s to extract preloaded images to volume ...
	W0916 11:24:07.206442 2256653 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 11:24:07.206542 2256653 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 11:24:07.302630 2256653 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname cert-options-844117 --name cert-options-844117 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=cert-options-844117 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=cert-options-844117 --network cert-options-844117 --ip 192.168.103.2 --volume cert-options-844117:/var --security-opt apparmor=unconfined --memory=2048mb --cpus=2 -e container=docker --expose 8555 --publish=127.0.0.1::8555 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 11:24:07.769415 2256653 cli_runner.go:164] Run: docker container inspect cert-options-844117 --format={{.State.Running}}
	I0916 11:24:07.792049 2256653 cli_runner.go:164] Run: docker container inspect cert-options-844117 --format={{.State.Status}}
	I0916 11:24:07.812252 2256653 cli_runner.go:164] Run: docker exec cert-options-844117 stat /var/lib/dpkg/alternatives/iptables
	I0916 11:24:07.888645 2256653 oci.go:144] the created container "cert-options-844117" has a running status.
	I0916 11:24:07.888676 2256653 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/cert-options-844117/id_rsa...
	I0916 11:24:08.537057 2256653 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/cert-options-844117/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 11:24:08.557890 2256653 cli_runner.go:164] Run: docker container inspect cert-options-844117 --format={{.State.Status}}
	I0916 11:24:08.574860 2256653 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 11:24:08.574871 2256653 kic_runner.go:114] Args: [docker exec --privileged cert-options-844117 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 11:24:08.642999 2256653 cli_runner.go:164] Run: docker container inspect cert-options-844117 --format={{.State.Status}}
	I0916 11:24:08.666627 2256653 machine.go:93] provisionDockerMachine start ...
	I0916 11:24:08.666725 2256653 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-844117
	I0916 11:24:08.685635 2256653 main.go:141] libmachine: Using SSH client type: native
	I0916 11:24:08.685903 2256653 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40852 <nil> <nil>}
	I0916 11:24:08.685910 2256653 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:24:08.686432 2256653 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:55166->127.0.0.1:40852: read: connection reset by peer
	I0916 11:24:10.104881 2253129 out.go:235]   - Configuring RBAC rules ...
	I0916 11:24:10.105010 2253129 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:24:10.111280 2253129 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:24:10.120681 2253129 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:24:10.127339 2253129 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:24:10.132153 2253129 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:24:10.136474 2253129 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:24:10.425508 2253129 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:24:10.850708 2253129 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 11:24:11.425208 2253129 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 11:24:11.426178 2253129 kubeadm.go:310] 
	I0916 11:24:11.426253 2253129 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 11:24:11.426258 2253129 kubeadm.go:310] 
	I0916 11:24:11.426334 2253129 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 11:24:11.426338 2253129 kubeadm.go:310] 
	I0916 11:24:11.426362 2253129 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 11:24:11.426421 2253129 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:24:11.426472 2253129 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:24:11.426475 2253129 kubeadm.go:310] 
	I0916 11:24:11.426534 2253129 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 11:24:11.426537 2253129 kubeadm.go:310] 
	I0916 11:24:11.426584 2253129 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:24:11.426588 2253129 kubeadm.go:310] 
	I0916 11:24:11.426639 2253129 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 11:24:11.426746 2253129 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:24:11.426814 2253129 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:24:11.426818 2253129 kubeadm.go:310] 
	I0916 11:24:11.426902 2253129 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:24:11.426984 2253129 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 11:24:11.426988 2253129 kubeadm.go:310] 
	I0916 11:24:11.427071 2253129 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 9xnevm.672dskg38hgh2kz9 \
	I0916 11:24:11.427174 2253129 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 11:24:11.427194 2253129 kubeadm.go:310] 	--control-plane 
	I0916 11:24:11.427198 2253129 kubeadm.go:310] 
	I0916 11:24:11.427292 2253129 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:24:11.427296 2253129 kubeadm.go:310] 
	I0916 11:24:11.427377 2253129 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 9xnevm.672dskg38hgh2kz9 \
	I0916 11:24:11.427479 2253129 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 11:24:11.432661 2253129 kubeadm.go:310] W0916 11:23:48.370630    1052 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:24:11.432955 2253129 kubeadm.go:310] W0916 11:23:48.375527    1052 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:24:11.433168 2253129 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:24:11.433274 2253129 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:24:11.433291 2253129 cni.go:84] Creating CNI manager for ""
	I0916 11:24:11.433297 2253129 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 11:24:11.435600 2253129 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 11:24:11.437842 2253129 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 11:24:11.441811 2253129 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 11:24:11.441822 2253129 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 11:24:11.463986 2253129 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 11:24:11.775596 2253129 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 11:24:11.775736 2253129 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:24:11.775811 2253129 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes cert-expiration-617624 minikube.k8s.io/updated_at=2024_09_16T11_24_11_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=cert-expiration-617624 minikube.k8s.io/primary=true
	I0916 11:24:11.993264 2253129 kubeadm.go:1113] duration metric: took 217.576176ms to wait for elevateKubeSystemPrivileges
	I0916 11:24:11.993291 2253129 ops.go:34] apiserver oom_adj: -16
	I0916 11:24:12.108758 2253129 kubeadm.go:394] duration metric: took 24.05426822s to StartCluster
	I0916 11:24:12.108785 2253129 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:24:12.108852 2253129 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:24:12.109611 2253129 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:24:12.109829 2253129 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 11:24:12.109959 2253129 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 11:24:12.110198 2253129 config.go:182] Loaded profile config "cert-expiration-617624": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:24:12.110236 2253129 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:24:12.110299 2253129 addons.go:69] Setting storage-provisioner=true in profile "cert-expiration-617624"
	I0916 11:24:12.110314 2253129 addons.go:234] Setting addon storage-provisioner=true in "cert-expiration-617624"
	I0916 11:24:12.110368 2253129 host.go:66] Checking if "cert-expiration-617624" exists ...
	I0916 11:24:12.111089 2253129 addons.go:69] Setting default-storageclass=true in profile "cert-expiration-617624"
	I0916 11:24:12.111105 2253129 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "cert-expiration-617624"
	I0916 11:24:12.111185 2253129 cli_runner.go:164] Run: docker container inspect cert-expiration-617624 --format={{.State.Status}}
	I0916 11:24:12.111416 2253129 cli_runner.go:164] Run: docker container inspect cert-expiration-617624 --format={{.State.Status}}
	I0916 11:24:12.112647 2253129 out.go:177] * Verifying Kubernetes components...
	I0916 11:24:12.115822 2253129 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:24:12.150840 2253129 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:24:12.154588 2253129 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:24:12.154600 2253129 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 11:24:12.154695 2253129 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-expiration-617624
	I0916 11:24:12.164233 2253129 addons.go:234] Setting addon default-storageclass=true in "cert-expiration-617624"
	I0916 11:24:12.164262 2253129 host.go:66] Checking if "cert-expiration-617624" exists ...
	I0916 11:24:12.164700 2253129 cli_runner.go:164] Run: docker container inspect cert-expiration-617624 --format={{.State.Status}}
	I0916 11:24:12.185672 2253129 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40847 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/cert-expiration-617624/id_rsa Username:docker}
	I0916 11:24:12.214838 2253129 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 11:24:12.214851 2253129 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 11:24:12.214922 2253129 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-expiration-617624
	I0916 11:24:12.236619 2253129 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40847 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/cert-expiration-617624/id_rsa Username:docker}
	I0916 11:24:12.440633 2253129 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:24:12.440805 2253129 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.76.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 11:24:12.461969 2253129 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:24:12.659538 2253129 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 11:24:13.299344 2253129 start.go:971] {"host.minikube.internal": 192.168.76.1} host record injected into CoreDNS's ConfigMap
	I0916 11:24:13.301181 2253129 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:24:13.301233 2253129 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:24:13.536448 2253129 api_server.go:72] duration metric: took 1.426593796s to wait for apiserver process to appear ...
	I0916 11:24:13.536457 2253129 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:24:13.536473 2253129 api_server.go:253] Checking apiserver healthz at https://192.168.76.2:8443/healthz ...
	I0916 11:24:13.537237 2253129 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml: (1.075245267s)
	I0916 11:24:13.562860 2253129 api_server.go:279] https://192.168.76.2:8443/healthz returned 200:
	ok
	I0916 11:24:13.566312 2253129 api_server.go:141] control plane version: v1.31.1
	I0916 11:24:13.566330 2253129 api_server.go:131] duration metric: took 29.866896ms to wait for apiserver health ...
	I0916 11:24:13.566337 2253129 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:24:13.569243 2253129 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 11:24:13.571486 2253129 addons.go:510] duration metric: took 1.461245699s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0916 11:24:13.579106 2253129 system_pods.go:59] 5 kube-system pods found
	I0916 11:24:13.579135 2253129 system_pods.go:61] "etcd-cert-expiration-617624" [0704aa54-3281-4fcb-b1a5-75b17481a06c] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 11:24:13.579141 2253129 system_pods.go:61] "kube-apiserver-cert-expiration-617624" [e7a00101-4116-4ad4-ad25-dcb7dc5e77ad] Running
	I0916 11:24:13.579150 2253129 system_pods.go:61] "kube-controller-manager-cert-expiration-617624" [6b8742dc-8a14-4ecf-b125-45889bef6f38] Running / Ready:ContainersNotReady (containers with unready status: [kube-controller-manager]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-controller-manager])
	I0916 11:24:13.579156 2253129 system_pods.go:61] "kube-scheduler-cert-expiration-617624" [aadfd1dc-212b-468d-a139-6aaa73acc695] Running / Ready:ContainersNotReady (containers with unready status: [kube-scheduler]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-scheduler])
	I0916 11:24:13.579160 2253129 system_pods.go:61] "storage-provisioner" [6cf44362-6b50-4914-a2a5-2899f008ace2] Pending: PodScheduled:Unschedulable (0/1 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/not-ready: }. preemption: 0/1 nodes are available: 1 Preemption is not helpful for scheduling.)
	I0916 11:24:13.579166 2253129 system_pods.go:74] duration metric: took 12.823023ms to wait for pod list to return data ...
	I0916 11:24:13.579176 2253129 kubeadm.go:582] duration metric: took 1.469326135s to wait for: map[apiserver:true system_pods:true]
	I0916 11:24:13.579188 2253129 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:24:13.583303 2253129 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:24:13.583321 2253129 node_conditions.go:123] node cpu capacity is 2
	I0916 11:24:13.583331 2253129 node_conditions.go:105] duration metric: took 4.139857ms to run NodePressure ...
	I0916 11:24:13.583343 2253129 start.go:241] waiting for startup goroutines ...
	I0916 11:24:13.803345 2253129 kapi.go:214] "coredns" deployment in "kube-system" namespace and "cert-expiration-617624" context rescaled to 1 replicas
	I0916 11:24:13.803368 2253129 start.go:246] waiting for cluster config update ...
	I0916 11:24:13.803379 2253129 start.go:255] writing updated cluster config ...
	I0916 11:24:13.803673 2253129 ssh_runner.go:195] Run: rm -f paused
	I0916 11:24:13.809746 2253129 out.go:177] * Done! kubectl is now configured to use "cert-expiration-617624" cluster and "default" namespace by default
	E0916 11:24:13.811468 2253129 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	I0916 11:24:11.838834 2256653 main.go:141] libmachine: SSH cmd err, output: <nil>: cert-options-844117
	
	I0916 11:24:11.838851 2256653 ubuntu.go:169] provisioning hostname "cert-options-844117"
	I0916 11:24:11.838935 2256653 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-844117
	I0916 11:24:11.874980 2256653 main.go:141] libmachine: Using SSH client type: native
	I0916 11:24:11.875220 2256653 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40852 <nil> <nil>}
	I0916 11:24:11.875230 2256653 main.go:141] libmachine: About to run SSH command:
	sudo hostname cert-options-844117 && echo "cert-options-844117" | sudo tee /etc/hostname
	I0916 11:24:12.058189 2256653 main.go:141] libmachine: SSH cmd err, output: <nil>: cert-options-844117
	
	I0916 11:24:12.058268 2256653 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-844117
	I0916 11:24:12.083308 2256653 main.go:141] libmachine: Using SSH client type: native
	I0916 11:24:12.083545 2256653 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40852 <nil> <nil>}
	I0916 11:24:12.083560 2256653 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\scert-options-844117' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 cert-options-844117/g' /etc/hosts;
				else 
					echo '127.0.1.1 cert-options-844117' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:24:12.261310 2256653 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:24:12.261328 2256653 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 11:24:12.261362 2256653 ubuntu.go:177] setting up certificates
	I0916 11:24:12.261370 2256653 provision.go:84] configureAuth start
	I0916 11:24:12.261430 2256653 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" cert-options-844117
	I0916 11:24:12.286073 2256653 provision.go:143] copyHostCerts
	I0916 11:24:12.286128 2256653 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 11:24:12.286136 2256653 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:24:12.286214 2256653 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 11:24:12.286306 2256653 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 11:24:12.286317 2256653 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:24:12.286346 2256653 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 11:24:12.286394 2256653 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 11:24:12.286397 2256653 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:24:12.286419 2256653 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 11:24:12.286461 2256653 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.cert-options-844117 san=[127.0.0.1 192.168.103.2 cert-options-844117 localhost minikube]
	I0916 11:24:13.093521 2256653 provision.go:177] copyRemoteCerts
	I0916 11:24:13.093580 2256653 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:24:13.093636 2256653 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-844117
	I0916 11:24:13.115271 2256653 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40852 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/cert-options-844117/id_rsa Username:docker}
	I0916 11:24:13.212615 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 11:24:13.257670 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 11:24:13.291548 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1224 bytes)
	I0916 11:24:13.350533 2256653 provision.go:87] duration metric: took 1.089149999s to configureAuth
	I0916 11:24:13.350550 2256653 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:24:13.350820 2256653 config.go:182] Loaded profile config "cert-options-844117": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:24:13.350827 2256653 machine.go:96] duration metric: took 4.684189993s to provisionDockerMachine
	I0916 11:24:13.350832 2256653 client.go:171] duration metric: took 11.603485164s to LocalClient.Create
	I0916 11:24:13.350846 2256653 start.go:167] duration metric: took 11.603531333s to libmachine.API.Create "cert-options-844117"
	I0916 11:24:13.350851 2256653 start.go:293] postStartSetup for "cert-options-844117" (driver="docker")
	I0916 11:24:13.350860 2256653 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:24:13.350927 2256653 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:24:13.350989 2256653 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-844117
	I0916 11:24:13.388647 2256653 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40852 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/cert-options-844117/id_rsa Username:docker}
	I0916 11:24:13.490375 2256653 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:24:13.496423 2256653 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:24:13.496454 2256653 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:24:13.496463 2256653 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:24:13.496470 2256653 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:24:13.496479 2256653 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 11:24:13.496540 2256653 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 11:24:13.496618 2256653 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 11:24:13.496723 2256653 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:24:13.512109 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:24:13.552554 2256653 start.go:296] duration metric: took 201.689564ms for postStartSetup
	I0916 11:24:13.552911 2256653 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" cert-options-844117
	I0916 11:24:13.581799 2256653 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/config.json ...
	I0916 11:24:13.582177 2256653 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:24:13.582242 2256653 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-844117
	I0916 11:24:13.601203 2256653 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40852 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/cert-options-844117/id_rsa Username:docker}
	I0916 11:24:13.699838 2256653 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:24:13.704559 2256653 start.go:128] duration metric: took 11.959732241s to createHost
	I0916 11:24:13.704573 2256653 start.go:83] releasing machines lock for "cert-options-844117", held for 11.959894447s
	I0916 11:24:13.704662 2256653 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" cert-options-844117
	I0916 11:24:13.723056 2256653 ssh_runner.go:195] Run: cat /version.json
	I0916 11:24:13.723102 2256653 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-844117
	I0916 11:24:13.723373 2256653 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:24:13.723433 2256653 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-844117
	I0916 11:24:13.744026 2256653 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40852 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/cert-options-844117/id_rsa Username:docker}
	I0916 11:24:13.744820 2256653 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40852 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/cert-options-844117/id_rsa Username:docker}
	I0916 11:24:13.978434 2256653 ssh_runner.go:195] Run: systemctl --version
	I0916 11:24:13.985034 2256653 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:24:13.989643 2256653 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 11:24:14.023980 2256653 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:24:14.024061 2256653 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:24:14.065005 2256653 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 11:24:14.065021 2256653 start.go:495] detecting cgroup driver to use...
	I0916 11:24:14.065058 2256653 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:24:14.065109 2256653 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 11:24:14.080217 2256653 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 11:24:14.093456 2256653 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:24:14.093513 2256653 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:24:14.108270 2256653 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:24:14.124161 2256653 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:24:14.221241 2256653 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:24:14.324171 2256653 docker.go:233] disabling docker service ...
	I0916 11:24:14.324245 2256653 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:24:14.348808 2256653 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:24:14.360679 2256653 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:24:14.479309 2256653 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:24:14.629846 2256653 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:24:14.652314 2256653 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:24:14.673586 2256653 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 11:24:14.685584 2256653 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 11:24:14.697307 2256653 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 11:24:14.697372 2256653 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 11:24:14.708420 2256653 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:24:14.723258 2256653 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 11:24:14.734560 2256653 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:24:14.746080 2256653 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:24:14.756559 2256653 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 11:24:14.768409 2256653 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 11:24:14.783517 2256653 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 11:24:14.795513 2256653 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:24:14.805011 2256653 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:24:14.814291 2256653 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:24:14.918969 2256653 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 11:24:15.118092 2256653 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 11:24:15.118164 2256653 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 11:24:15.125576 2256653 start.go:563] Will wait 60s for crictl version
	I0916 11:24:15.125661 2256653 ssh_runner.go:195] Run: which crictl
	I0916 11:24:15.131711 2256653 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:24:15.200087 2256653 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 11:24:15.200154 2256653 ssh_runner.go:195] Run: containerd --version
	I0916 11:24:15.230192 2256653 ssh_runner.go:195] Run: containerd --version
	I0916 11:24:15.266626 2256653 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 11:24:15.268404 2256653 cli_runner.go:164] Run: docker network inspect cert-options-844117 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:24:15.288859 2256653 ssh_runner.go:195] Run: grep 192.168.103.1	host.minikube.internal$ /etc/hosts
	I0916 11:24:15.292756 2256653 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.103.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:24:15.305139 2256653 kubeadm.go:883] updating cluster {Name:cert-options-844117 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2048 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8555 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:cert-options-844117 Namespace:default APIServerHAVIP: APIServerName:minikubeCA
APIServerNames:[localhost www.google.com] APIServerIPs:[127.0.0.1 192.168.15.15] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.103.2 Port:8555 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwar
ePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:24:15.305247 2256653 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:24:15.305304 2256653 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:24:15.353014 2256653 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 11:24:15.353027 2256653 containerd.go:534] Images already preloaded, skipping extraction
	I0916 11:24:15.353087 2256653 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:24:15.403771 2256653 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 11:24:15.403784 2256653 cache_images.go:84] Images are preloaded, skipping loading
	I0916 11:24:15.403791 2256653 kubeadm.go:934] updating node { 192.168.103.2 8555 v1.31.1 containerd true true} ...
	I0916 11:24:15.403886 2256653 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=cert-options-844117 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.103.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:cert-options-844117 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[localhost www.google.com] APIServerIPs:[127.0.0.1 192.168.15.15] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:24:15.403958 2256653 ssh_runner.go:195] Run: sudo crictl info
	I0916 11:24:15.449462 2256653 cni.go:84] Creating CNI manager for ""
	I0916 11:24:15.449474 2256653 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 11:24:15.449489 2256653 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:24:15.449515 2256653 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.103.2 APIServerPort:8555 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:cert-options-844117 NodeName:cert-options-844117 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.103.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.103.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt Sta
ticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:24:15.449661 2256653 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.103.2
	  bindPort: 8555
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "cert-options-844117"
	  kubeletExtraArgs:
	    node-ip: 192.168.103.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.103.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8555
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 11:24:15.449740 2256653 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:24:15.469333 2256653 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:24:15.469405 2256653 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 11:24:15.484784 2256653 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (324 bytes)
	I0916 11:24:15.512593 2256653 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:24:15.536120 2256653 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2176 bytes)
	I0916 11:24:15.558495 2256653 ssh_runner.go:195] Run: grep 192.168.103.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:24:15.563355 2256653 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.103.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:24:15.576713 2256653 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:24:15.693850 2256653 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:24:15.715570 2256653 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117 for IP: 192.168.103.2
	I0916 11:24:15.715581 2256653 certs.go:194] generating shared ca certs ...
	I0916 11:24:15.715596 2256653 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:24:15.715739 2256653 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 11:24:15.715779 2256653 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 11:24:15.715790 2256653 certs.go:256] generating profile certs ...
	I0916 11:24:15.715849 2256653 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/client.key
	I0916 11:24:15.715860 2256653 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/client.crt with IP's: []
	I0916 11:24:16.061668 2256653 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/client.crt ...
	I0916 11:24:16.061688 2256653 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/client.crt: {Name:mk2e9fd49a93824588d3d2c6047ea0e47ad2685b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:24:16.061901 2256653 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/client.key ...
	I0916 11:24:16.061910 2256653 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/client.key: {Name:mk58b9a9aff3163de7a4c9ac2464f8ce3905826b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:24:16.062014 2256653 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/apiserver.key.7a4f3d46
	I0916 11:24:16.062028 2256653 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/apiserver.crt.7a4f3d46 with IP's: [127.0.0.1 192.168.15.15 10.96.0.1 127.0.0.1 10.0.0.1 192.168.103.2]
	I0916 11:24:16.499538 2256653 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/apiserver.crt.7a4f3d46 ...
	I0916 11:24:16.499556 2256653 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/apiserver.crt.7a4f3d46: {Name:mk53de2d53fe82b86bad6b300e7089d327e0808f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:24:16.499749 2256653 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/apiserver.key.7a4f3d46 ...
	I0916 11:24:16.499759 2256653 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/apiserver.key.7a4f3d46: {Name:mk6fe0d350630575385aa65691cffcb4c5b3c397 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:24:16.499828 2256653 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/apiserver.crt.7a4f3d46 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/apiserver.crt
	I0916 11:24:16.499904 2256653 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/apiserver.key.7a4f3d46 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/apiserver.key
	I0916 11:24:16.499960 2256653 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/proxy-client.key
	I0916 11:24:16.499974 2256653 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/proxy-client.crt with IP's: []
	I0916 11:24:16.972562 2256653 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/proxy-client.crt ...
	I0916 11:24:16.972580 2256653 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/proxy-client.crt: {Name:mk2b203c1ae68c78249d0e9fcaa3ef6480f93c2f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:24:16.972827 2256653 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/proxy-client.key ...
	I0916 11:24:16.972836 2256653 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/proxy-client.key: {Name:mkd1810beb9ee3539a77786f0e7073f4ce85f048 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:24:16.973049 2256653 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 11:24:16.973087 2256653 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 11:24:16.973096 2256653 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:24:16.973119 2256653 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 11:24:16.973142 2256653 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:24:16.973164 2256653 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 11:24:16.973203 2256653 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:24:16.973807 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:24:17.004720 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 11:24:17.042041 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:24:17.074902 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 11:24:17.109986 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1480 bytes)
	I0916 11:24:17.137552 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 11:24:17.163560 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 11:24:17.188815 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/cert-options-844117/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 11:24:17.214051 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 11:24:17.239886 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 11:24:17.264967 2256653 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:24:17.290012 2256653 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 11:24:17.308979 2256653 ssh_runner.go:195] Run: openssl version
	I0916 11:24:17.314701 2256653 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 11:24:17.327605 2256653 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 11:24:17.333087 2256653 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:24:17.333150 2256653 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 11:24:17.343644 2256653 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 11:24:17.354554 2256653 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 11:24:17.364468 2256653 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 11:24:17.368401 2256653 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:24:17.368467 2256653 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 11:24:17.375936 2256653 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:24:17.385266 2256653 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:24:17.397782 2256653 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:24:17.402155 2256653 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:24:17.402214 2256653 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:24:17.409903 2256653 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:24:17.419484 2256653 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:24:17.422873 2256653 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:24:17.422933 2256653 kubeadm.go:392] StartCluster: {Name:cert-options-844117 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2048 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8555 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:cert-options-844117 Namespace:default APIServerHAVIP: APIServerName:minikubeCA AP
IServerNames:[localhost www.google.com] APIServerIPs:[127.0.0.1 192.168.15.15] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.103.2 Port:8555 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePa
th: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:24:17.423005 2256653 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 11:24:17.423100 2256653 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 11:24:17.466856 2256653 cri.go:89] found id: ""
	I0916 11:24:17.466933 2256653 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 11:24:17.476334 2256653 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 11:24:17.485696 2256653 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 11:24:17.485753 2256653 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 11:24:17.494661 2256653 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 11:24:17.494670 2256653 kubeadm.go:157] found existing configuration files:
	
	I0916 11:24:17.494751 2256653 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/admin.conf
	I0916 11:24:17.503768 2256653 kubeadm.go:163] "https://control-plane.minikube.internal:8555" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 11:24:17.503827 2256653 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 11:24:17.512653 2256653 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/kubelet.conf
	I0916 11:24:17.521399 2256653 kubeadm.go:163] "https://control-plane.minikube.internal:8555" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 11:24:17.521454 2256653 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 11:24:17.530303 2256653 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/controller-manager.conf
	I0916 11:24:17.539146 2256653 kubeadm.go:163] "https://control-plane.minikube.internal:8555" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 11:24:17.539211 2256653 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 11:24:17.548051 2256653 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/scheduler.conf
	I0916 11:24:17.556910 2256653 kubeadm.go:163] "https://control-plane.minikube.internal:8555" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8555 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 11:24:17.556973 2256653 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 11:24:17.565524 2256653 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 11:24:17.606327 2256653 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 11:24:17.606410 2256653 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 11:24:17.626607 2256653 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:24:17.626738 2256653 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:24:17.626773 2256653 kubeadm.go:310] OS: Linux
	I0916 11:24:17.626818 2256653 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 11:24:17.626866 2256653 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 11:24:17.626913 2256653 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 11:24:17.626972 2256653 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 11:24:17.627020 2256653 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 11:24:17.627067 2256653 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 11:24:17.627111 2256653 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 11:24:17.627159 2256653 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 11:24:17.627205 2256653 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 11:24:17.690580 2256653 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 11:24:17.690710 2256653 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 11:24:17.690802 2256653 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 11:24:17.696608 2256653 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 11:24:17.700533 2256653 out.go:235]   - Generating certificates and keys ...
	I0916 11:24:17.700709 2256653 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 11:24:17.700786 2256653 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 11:24:18.678875 2256653 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 11:24:19.446606 2256653 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 11:24:19.755625 2256653 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 11:24:20.229850 2256653 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 11:24:20.573821 2256653 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 11:24:20.574114 2256653 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [cert-options-844117 localhost] and IPs [192.168.103.2 127.0.0.1 ::1]
	I0916 11:24:21.773217 2256653 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 11:24:21.773545 2256653 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [cert-options-844117 localhost] and IPs [192.168.103.2 127.0.0.1 ::1]
	I0916 11:24:23.005213 2256653 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 11:24:23.909685 2256653 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 11:24:24.340713 2256653 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 11:24:24.340975 2256653 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 11:24:24.663067 2256653 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 11:24:25.020624 2256653 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 11:24:25.183212 2256653 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 11:24:25.697112 2256653 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 11:24:25.900475 2256653 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 11:24:25.901343 2256653 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 11:24:25.904436 2256653 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 11:24:25.907211 2256653 out.go:235]   - Booting up control plane ...
	I0916 11:24:25.907320 2256653 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 11:24:25.907406 2256653 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 11:24:25.908082 2256653 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 11:24:25.921960 2256653 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:24:25.929577 2256653 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:24:25.929626 2256653 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 11:24:26.030439 2256653 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 11:24:26.030555 2256653 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:24:27.031596 2256653 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.001388215s
	I0916 11:24:27.031715 2256653 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 11:24:34.533854 2256653 kubeadm.go:310] [api-check] The API server is healthy after 7.502321087s
	I0916 11:24:34.554857 2256653 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:24:34.569497 2256653 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:24:34.594513 2256653 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:24:34.594743 2256653 kubeadm.go:310] [mark-control-plane] Marking the node cert-options-844117 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:24:34.608315 2256653 kubeadm.go:310] [bootstrap-token] Using token: 5m7deq.4yl0w18risazpig2
	I0916 11:24:34.610248 2256653 out.go:235]   - Configuring RBAC rules ...
	I0916 11:24:34.610373 2256653 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:24:34.616616 2256653 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:24:34.626953 2256653 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:24:34.630447 2256653 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:24:34.634117 2256653 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:24:34.637772 2256653 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:24:34.941544 2256653 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:24:35.373066 2256653 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 11:24:35.940752 2256653 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 11:24:35.944393 2256653 kubeadm.go:310] 
	I0916 11:24:35.944476 2256653 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 11:24:35.944481 2256653 kubeadm.go:310] 
	I0916 11:24:35.944595 2256653 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 11:24:35.944603 2256653 kubeadm.go:310] 
	I0916 11:24:35.944628 2256653 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 11:24:35.944687 2256653 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:24:35.944738 2256653 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:24:35.944741 2256653 kubeadm.go:310] 
	I0916 11:24:35.944815 2256653 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 11:24:35.944830 2256653 kubeadm.go:310] 
	I0916 11:24:35.944884 2256653 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:24:35.944888 2256653 kubeadm.go:310] 
	I0916 11:24:35.944939 2256653 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 11:24:35.945018 2256653 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:24:35.945111 2256653 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:24:35.945117 2256653 kubeadm.go:310] 
	I0916 11:24:35.945212 2256653 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:24:35.945304 2256653 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 11:24:35.945310 2256653 kubeadm.go:310] 
	I0916 11:24:35.945401 2256653 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8555 --token 5m7deq.4yl0w18risazpig2 \
	I0916 11:24:35.945505 2256653 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 11:24:35.945525 2256653 kubeadm.go:310] 	--control-plane 
	I0916 11:24:35.945529 2256653 kubeadm.go:310] 
	I0916 11:24:35.945614 2256653 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:24:35.945617 2256653 kubeadm.go:310] 
	I0916 11:24:35.945703 2256653 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8555 --token 5m7deq.4yl0w18risazpig2 \
	I0916 11:24:35.945805 2256653 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 11:24:35.950507 2256653 kubeadm.go:310] W0916 11:24:17.603099    1054 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:24:35.950841 2256653 kubeadm.go:310] W0916 11:24:17.603961    1054 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:24:35.951068 2256653 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:24:35.951172 2256653 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:24:35.951186 2256653 cni.go:84] Creating CNI manager for ""
	I0916 11:24:35.951194 2256653 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 11:24:35.954661 2256653 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 11:24:35.956854 2256653 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 11:24:35.961146 2256653 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 11:24:35.961156 2256653 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 11:24:35.983417 2256653 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 11:24:36.327081 2256653 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 11:24:36.327218 2256653 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes cert-options-844117 minikube.k8s.io/updated_at=2024_09_16T11_24_36_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=cert-options-844117 minikube.k8s.io/primary=true
	I0916 11:24:36.327218 2256653 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:24:36.601829 2256653 kubeadm.go:1113] duration metric: took 274.686769ms to wait for elevateKubeSystemPrivileges
	I0916 11:24:36.601858 2256653 ops.go:34] apiserver oom_adj: -16
	I0916 11:24:36.601872 2256653 kubeadm.go:394] duration metric: took 19.178943895s to StartCluster
	I0916 11:24:36.601887 2256653 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:24:36.601951 2256653 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:24:36.602997 2256653 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:24:36.603248 2256653 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.103.2 Port:8555 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 11:24:36.603333 2256653 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 11:24:36.603581 2256653 config.go:182] Loaded profile config "cert-options-844117": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:24:36.603640 2256653 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:24:36.603698 2256653 addons.go:69] Setting storage-provisioner=true in profile "cert-options-844117"
	I0916 11:24:36.603710 2256653 addons.go:234] Setting addon storage-provisioner=true in "cert-options-844117"
	I0916 11:24:36.603748 2256653 host.go:66] Checking if "cert-options-844117" exists ...
	I0916 11:24:36.604212 2256653 cli_runner.go:164] Run: docker container inspect cert-options-844117 --format={{.State.Status}}
	I0916 11:24:36.604676 2256653 addons.go:69] Setting default-storageclass=true in profile "cert-options-844117"
	I0916 11:24:36.604694 2256653 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "cert-options-844117"
	I0916 11:24:36.604986 2256653 cli_runner.go:164] Run: docker container inspect cert-options-844117 --format={{.State.Status}}
	I0916 11:24:36.606286 2256653 out.go:177] * Verifying Kubernetes components...
	I0916 11:24:36.608410 2256653 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:24:36.657483 2256653 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:24:36.660669 2256653 addons.go:234] Setting addon default-storageclass=true in "cert-options-844117"
	I0916 11:24:36.660706 2256653 host.go:66] Checking if "cert-options-844117" exists ...
	I0916 11:24:36.661163 2256653 cli_runner.go:164] Run: docker container inspect cert-options-844117 --format={{.State.Status}}
	I0916 11:24:36.663154 2256653 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:24:36.663165 2256653 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 11:24:36.663230 2256653 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-844117
	I0916 11:24:36.698810 2256653 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40852 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/cert-options-844117/id_rsa Username:docker}
	I0916 11:24:36.707124 2256653 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 11:24:36.707137 2256653 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 11:24:36.707202 2256653 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" cert-options-844117
	I0916 11:24:36.734349 2256653 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40852 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/cert-options-844117/id_rsa Username:docker}
	I0916 11:24:36.854474 2256653 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:24:36.854545 2256653 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.103.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 11:24:36.898790 2256653 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:24:36.898848 2256653 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:24:36.901951 2256653 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:24:36.955172 2256653 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 11:24:37.296660 2256653 start.go:971] {"host.minikube.internal": 192.168.103.1} host record injected into CoreDNS's ConfigMap
	I0916 11:24:37.298028 2256653 api_server.go:72] duration metric: took 694.752658ms to wait for apiserver process to appear ...
	I0916 11:24:37.298036 2256653 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:24:37.298048 2256653 api_server.go:253] Checking apiserver healthz at https://192.168.103.2:8555/healthz ...
	I0916 11:24:37.320663 2256653 api_server.go:279] https://192.168.103.2:8555/healthz returned 200:
	ok
	I0916 11:24:37.324753 2256653 api_server.go:141] control plane version: v1.31.1
	I0916 11:24:37.324770 2256653 api_server.go:131] duration metric: took 26.728706ms to wait for apiserver health ...
	I0916 11:24:37.324778 2256653 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:24:37.333922 2256653 system_pods.go:59] 4 kube-system pods found
	I0916 11:24:37.333947 2256653 system_pods.go:61] "etcd-cert-options-844117" [47fe24d1-ccbd-4659-9d1d-2a2e4cb4e197] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 11:24:37.333956 2256653 system_pods.go:61] "kube-apiserver-cert-options-844117" [ffc200c7-4828-49b8-a0c2-a4b3c9bd9910] Running / Ready:ContainersNotReady (containers with unready status: [kube-apiserver]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-apiserver])
	I0916 11:24:37.333963 2256653 system_pods.go:61] "kube-controller-manager-cert-options-844117" [f764fb70-b31d-45cd-99b3-91466613fcab] Running / Ready:ContainersNotReady (containers with unready status: [kube-controller-manager]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-controller-manager])
	I0916 11:24:37.333970 2256653 system_pods.go:61] "kube-scheduler-cert-options-844117" [38380875-5265-4147-aaa2-5e3cf266e7ad] Running / Ready:ContainersNotReady (containers with unready status: [kube-scheduler]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-scheduler])
	I0916 11:24:37.333976 2256653 system_pods.go:74] duration metric: took 9.192841ms to wait for pod list to return data ...
	I0916 11:24:37.333986 2256653 kubeadm.go:582] duration metric: took 730.713647ms to wait for: map[apiserver:true system_pods:true]
	I0916 11:24:37.333996 2256653 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:24:37.337427 2256653 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:24:37.337445 2256653 node_conditions.go:123] node cpu capacity is 2
	I0916 11:24:37.337455 2256653 node_conditions.go:105] duration metric: took 3.455168ms to run NodePressure ...
	I0916 11:24:37.337466 2256653 start.go:241] waiting for startup goroutines ...
	I0916 11:24:37.596429 2256653 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 11:24:37.597919 2256653 addons.go:510] duration metric: took 994.300722ms for enable addons: enabled=[storage-provisioner default-storageclass]
	I0916 11:24:37.801432 2256653 kapi.go:214] "coredns" deployment in "kube-system" namespace and "cert-options-844117" context rescaled to 1 replicas
	I0916 11:24:37.801464 2256653 start.go:246] waiting for cluster config update ...
	I0916 11:24:37.801475 2256653 start.go:255] writing updated cluster config ...
	I0916 11:24:37.801798 2256653 ssh_runner.go:195] Run: rm -f paused
	I0916 11:24:37.807759 2256653 out.go:177] * Done! kubectl is now configured to use "cert-options-844117" cluster and "default" namespace by default
	E0916 11:24:37.809699 2256653 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	4df431f97d423       d3f53a98c0a9d       11 seconds ago      Running             kube-apiserver            0                   faf1c5fcfa8c9       kube-apiserver-cert-options-844117
	0b9d8fc8a6771       7f8aa378bb47d       11 seconds ago      Running             kube-scheduler            0                   64513a63326bf       kube-scheduler-cert-options-844117
	0d0f83c23b83a       279f381cb3736       11 seconds ago      Running             kube-controller-manager   0                   8bf4b8fd66ff9       kube-controller-manager-cert-options-844117
	e44b682b6487c       27e3830e14027       12 seconds ago      Running             etcd                      0                   d423a01f6b400       etcd-cert-options-844117
	
	
	==> containerd <==
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.518011396Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.535333420Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.535418162Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.535435549Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.535548065Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.547253097Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:etcd-cert-options-844117,Uid:ea3f84c29a8be9234267d8a27770c011,Namespace:kube-system,Attempt:0,} returns sandbox id \"d423a01f6b4002a1be409dec72a3b6809ced355280d31c5f24e9448ecd1a1943\""
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.551561413Z" level=info msg="CreateContainer within sandbox \"d423a01f6b4002a1be409dec72a3b6809ced355280d31c5f24e9448ecd1a1943\" for container &ContainerMetadata{Name:etcd,Attempt:0,}"
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.599907709Z" level=info msg="CreateContainer within sandbox \"d423a01f6b4002a1be409dec72a3b6809ced355280d31c5f24e9448ecd1a1943\" for &ContainerMetadata{Name:etcd,Attempt:0,} returns container id \"e44b682b6487c555bff210dab5d28b62927daa308478dd314547b00b059bfd71\""
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.601849113Z" level=info msg="StartContainer for \"e44b682b6487c555bff210dab5d28b62927daa308478dd314547b00b059bfd71\""
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.626966645Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:kube-scheduler-cert-options-844117,Uid:11850c64def695e9bdc4024560affd76,Namespace:kube-system,Attempt:0,} returns sandbox id \"64513a63326bfbc0ca4288c9126ac33403dcecdc7ac5da3efe557c4b7beeab20\""
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.631174309Z" level=info msg="CreateContainer within sandbox \"64513a63326bfbc0ca4288c9126ac33403dcecdc7ac5da3efe557c4b7beeab20\" for container &ContainerMetadata{Name:kube-scheduler,Attempt:0,}"
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.639578355Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:kube-controller-manager-cert-options-844117,Uid:8242b462d3299686c5c35d57108ec349,Namespace:kube-system,Attempt:0,} returns sandbox id \"8bf4b8fd66ff96f7786b04d16fe23f06ee683901044d096a562e65a269d70716\""
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.644703486Z" level=info msg="CreateContainer within sandbox \"8bf4b8fd66ff96f7786b04d16fe23f06ee683901044d096a562e65a269d70716\" for container &ContainerMetadata{Name:kube-controller-manager,Attempt:0,}"
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.650282137Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:kube-apiserver-cert-options-844117,Uid:1cbda566f5c0b313c7ecb82521c3206e,Namespace:kube-system,Attempt:0,} returns sandbox id \"faf1c5fcfa8c94ab1c50fc6376baef508f5ede0e7478a17a6406519351fc0c7f\""
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.655156833Z" level=info msg="CreateContainer within sandbox \"faf1c5fcfa8c94ab1c50fc6376baef508f5ede0e7478a17a6406519351fc0c7f\" for container &ContainerMetadata{Name:kube-apiserver,Attempt:0,}"
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.688799616Z" level=info msg="CreateContainer within sandbox \"8bf4b8fd66ff96f7786b04d16fe23f06ee683901044d096a562e65a269d70716\" for &ContainerMetadata{Name:kube-controller-manager,Attempt:0,} returns container id \"0d0f83c23b83ae7a94fc2d24eef1ac384a6a9b068d8f7e0d6806d89c4fe17736\""
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.689775586Z" level=info msg="StartContainer for \"0d0f83c23b83ae7a94fc2d24eef1ac384a6a9b068d8f7e0d6806d89c4fe17736\""
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.697445006Z" level=info msg="CreateContainer within sandbox \"64513a63326bfbc0ca4288c9126ac33403dcecdc7ac5da3efe557c4b7beeab20\" for &ContainerMetadata{Name:kube-scheduler,Attempt:0,} returns container id \"0b9d8fc8a6771cb5d7ec75db26bb0a92ea90bd22829d738149174dc1c58ef65c\""
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.704545338Z" level=info msg="StartContainer for \"0b9d8fc8a6771cb5d7ec75db26bb0a92ea90bd22829d738149174dc1c58ef65c\""
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.728128069Z" level=info msg="StartContainer for \"e44b682b6487c555bff210dab5d28b62927daa308478dd314547b00b059bfd71\" returns successfully"
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.729917597Z" level=info msg="CreateContainer within sandbox \"faf1c5fcfa8c94ab1c50fc6376baef508f5ede0e7478a17a6406519351fc0c7f\" for &ContainerMetadata{Name:kube-apiserver,Attempt:0,} returns container id \"4df431f97d42347674ba0de069903fe662476cb456b5bc53d2ed55d4bf1f851d\""
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.730844575Z" level=info msg="StartContainer for \"4df431f97d42347674ba0de069903fe662476cb456b5bc53d2ed55d4bf1f851d\""
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.845901418Z" level=info msg="StartContainer for \"0d0f83c23b83ae7a94fc2d24eef1ac384a6a9b068d8f7e0d6806d89c4fe17736\" returns successfully"
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.974188608Z" level=info msg="StartContainer for \"0b9d8fc8a6771cb5d7ec75db26bb0a92ea90bd22829d738149174dc1c58ef65c\" returns successfully"
	Sep 16 11:24:27 cert-options-844117 containerd[824]: time="2024-09-16T11:24:27.974367783Z" level=info msg="StartContainer for \"4df431f97d42347674ba0de069903fe662476cb456b5bc53d2ed55d4bf1f851d\" returns successfully"
	
	
	==> describe nodes <==
	Name:               cert-options-844117
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=cert-options-844117
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=cert-options-844117
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T11_24_36_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:24:32 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  cert-options-844117
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:24:35 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:24:35 +0000   Mon, 16 Sep 2024 11:24:28 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:24:35 +0000   Mon, 16 Sep 2024 11:24:28 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:24:35 +0000   Mon, 16 Sep 2024 11:24:28 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:24:35 +0000   Mon, 16 Sep 2024 11:24:33 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.103.2
	  Hostname:    cert-options-844117
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 4d6bf58406a24d7899bb06cbd90fa192
	  System UUID:                7e12f1c5-4cf6-425f-af85-e92ca6b00b62
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (5 in total)
	  Namespace                   Name                                           CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                           ------------  ----------  ---------------  -------------  ---
	  kube-system                 etcd-cert-options-844117                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         4s
	  kube-system                 kube-apiserver-cert-options-844117             250m (12%)    0 (0%)      0 (0%)           0 (0%)         4s
	  kube-system                 kube-controller-manager-cert-options-844117    200m (10%)    0 (0%)      0 (0%)           0 (0%)         4s
	  kube-system                 kube-scheduler-cert-options-844117             100m (5%)     0 (0%)      0 (0%)           0 (0%)         4s
	  kube-system                 storage-provisioner                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         2s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                650m (32%)  0 (0%)
	  memory             100Mi (1%)  0 (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Warning  CgroupV1                 13s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   Starting                 13s                kubelet          Starting kubelet.
	  Normal   NodeAllocatableEnforced  12s                kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  12s (x8 over 12s)  kubelet          Node cert-options-844117 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    12s (x7 over 12s)  kubelet          Node cert-options-844117 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     12s (x7 over 12s)  kubelet          Node cert-options-844117 status is now: NodeHasSufficientPID
	  Normal   Starting                 4s                 kubelet          Starting kubelet.
	  Warning  CgroupV1                 4s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  4s                 kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  4s                 kubelet          Node cert-options-844117 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    4s                 kubelet          Node cert-options-844117 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     4s                 kubelet          Node cert-options-844117 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           0s                 node-controller  Node cert-options-844117 event: Registered Node cert-options-844117 in Controller
	
	
	==> dmesg <==
	[Sep16 11:22] overlayfs: failed to resolve '/var/lib/containerd/io.containerd.snapshotter.v1.overlayfs/snapshots/25/fs': -2
	
	
	==> etcd [e44b682b6487c555bff210dab5d28b62927daa308478dd314547b00b059bfd71] <==
	{"level":"info","ts":"2024-09-16T11:24:27.771381Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T11:24:27.771648Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.103.2:2380"}
	{"level":"info","ts":"2024-09-16T11:24:27.771665Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.103.2:2380"}
	{"level":"info","ts":"2024-09-16T11:24:27.771820Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"f23060b075c4c089","initial-advertise-peer-urls":["https://192.168.103.2:2380"],"listen-peer-urls":["https://192.168.103.2:2380"],"advertise-client-urls":["https://192.168.103.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.103.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T11:24:27.771843Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T11:24:27.856699Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"f23060b075c4c089 is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T11:24:27.856739Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"f23060b075c4c089 became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T11:24:27.856781Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"f23060b075c4c089 received MsgPreVoteResp from f23060b075c4c089 at term 1"}
	{"level":"info","ts":"2024-09-16T11:24:27.856801Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"f23060b075c4c089 became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T11:24:27.856809Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"f23060b075c4c089 received MsgVoteResp from f23060b075c4c089 at term 2"}
	{"level":"info","ts":"2024-09-16T11:24:27.856820Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"f23060b075c4c089 became leader at term 2"}
	{"level":"info","ts":"2024-09-16T11:24:27.856828Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: f23060b075c4c089 elected leader f23060b075c4c089 at term 2"}
	{"level":"info","ts":"2024-09-16T11:24:27.857794Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"f23060b075c4c089","local-member-attributes":"{Name:cert-options-844117 ClientURLs:[https://192.168.103.2:2379]}","request-path":"/0/members/f23060b075c4c089/attributes","cluster-id":"3336683c081d149d","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:24:27.858992Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:24:27.860844Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:24:27.860978Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:24:27.861517Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:24:27.861540Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T11:24:27.862111Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"3336683c081d149d","local-member-id":"f23060b075c4c089","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:24:27.863893Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:24:27.864077Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:24:27.866222Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:24:27.866437Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:24:27.868034Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.103.2:2379"}
	{"level":"info","ts":"2024-09-16T11:24:27.870563Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	
	
	==> kernel <==
	 11:24:39 up 1 day, 15:07,  0 users,  load average: 4.68, 4.02, 3.15
	Linux cert-options-844117 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kube-apiserver [4df431f97d42347674ba0de069903fe662476cb456b5bc53d2ed55d4bf1f851d] <==
	I0916 11:24:32.961339       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 11:24:32.961356       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 11:24:32.961441       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 11:24:32.961636       1 aggregator.go:171] initial CRD sync complete...
	I0916 11:24:32.961653       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 11:24:32.961657       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 11:24:32.961663       1 cache.go:39] Caches are synced for autoregister controller
	I0916 11:24:32.963393       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 11:24:32.979397       1 controller.go:615] quota admission added evaluator for: namespaces
	I0916 11:24:33.012209       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 11:24:33.044617       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:24:33.746736       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0916 11:24:33.751714       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0916 11:24:33.751740       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:24:34.424321       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 11:24:34.469149       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 11:24:34.603511       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 11:24:34.615396       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.103.2]
	I0916 11:24:34.617368       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:24:34.623381       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 11:24:34.870515       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 11:24:35.353670       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 11:24:35.371148       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 11:24:35.383931       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 11:24:39.675744       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	
	
	==> kube-controller-manager [0d0f83c23b83ae7a94fc2d24eef1ac384a6a9b068d8f7e0d6806d89c4fe17736] <==
	I0916 11:24:39.616664       1 shared_informer.go:320] Caches are synced for PVC protection
	I0916 11:24:39.616791       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 11:24:39.617021       1 shared_informer.go:320] Caches are synced for TTL
	I0916 11:24:39.617125       1 shared_informer.go:320] Caches are synced for ephemeral
	I0916 11:24:39.617615       1 shared_informer.go:320] Caches are synced for legacy-service-account-token-cleaner
	I0916 11:24:39.617830       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 11:24:39.621122       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 11:24:39.621176       1 shared_informer.go:320] Caches are synced for GC
	I0916 11:24:39.624232       1 shared_informer.go:320] Caches are synced for namespace
	I0916 11:24:39.630143       1 shared_informer.go:320] Caches are synced for service account
	I0916 11:24:39.634464       1 shared_informer.go:320] Caches are synced for bootstrap_signer
	I0916 11:24:39.645958       1 shared_informer.go:320] Caches are synced for crt configmap
	I0916 11:24:39.676637       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 11:24:39.679753       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 11:24:39.704809       1 shared_informer.go:320] Caches are synced for taint
	I0916 11:24:39.705459       1 node_lifecycle_controller.go:1232] "Initializing eviction metric for zone" logger="node-lifecycle-controller" zone=""
	I0916 11:24:39.705835       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="cert-options-844117"
	I0916 11:24:39.705938       1 node_lifecycle_controller.go:1078] "Controller detected that zone is now in new state" logger="node-lifecycle-controller" zone="" newState="Normal"
	I0916 11:24:39.723789       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 11:24:39.730476       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="cert-options-844117"
	I0916 11:24:39.751530       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 11:24:39.757150       1 shared_informer.go:320] Caches are synced for endpoint
	I0916 11:24:39.768317       1 shared_informer.go:320] Caches are synced for endpoint_slice_mirroring
	I0916 11:24:39.770795       1 shared_informer.go:320] Caches are synced for ClusterRoleAggregator
	I0916 11:24:39.820873       1 shared_informer.go:320] Caches are synced for attach detach
	
	
	==> kube-scheduler [0b9d8fc8a6771cb5d7ec75db26bb0a92ea90bd22829d738149174dc1c58ef65c] <==
	W0916 11:24:32.987593       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 11:24:32.987613       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:24:32.987773       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 11:24:32.987794       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:24:33.817379       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 11:24:33.817428       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:24:33.830160       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 11:24:33.830458       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:24:33.885184       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0916 11:24:33.885801       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 11:24:33.987652       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 11:24:33.987883       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:24:34.003244       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 11:24:34.003521       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:24:34.040100       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 11:24:34.040338       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	W0916 11:24:34.086717       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 11:24:34.086974       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 11:24:34.113497       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 11:24:34.113716       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:24:34.136740       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 11:24:34.136792       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:24:34.221713       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 11:24:34.222184       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	I0916 11:24:37.067317       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 11:24:35 cert-options-844117 kubelet[1530]: I0916 11:24:35.705148    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ca-certificates\" (UniqueName: \"kubernetes.io/host-path/1cbda566f5c0b313c7ecb82521c3206e-etc-ca-certificates\") pod \"kube-apiserver-cert-options-844117\" (UID: \"1cbda566f5c0b313c7ecb82521c3206e\") " pod="kube-system/kube-apiserver-cert-options-844117"
	Sep 16 11:24:35 cert-options-844117 kubelet[1530]: I0916 11:24:35.705171    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"k8s-certs\" (UniqueName: \"kubernetes.io/host-path/8242b462d3299686c5c35d57108ec349-k8s-certs\") pod \"kube-controller-manager-cert-options-844117\" (UID: \"8242b462d3299686c5c35d57108ec349\") " pod="kube-system/kube-controller-manager-cert-options-844117"
	Sep 16 11:24:35 cert-options-844117 kubelet[1530]: I0916 11:24:35.705189    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubeconfig\" (UniqueName: \"kubernetes.io/host-path/11850c64def695e9bdc4024560affd76-kubeconfig\") pod \"kube-scheduler-cert-options-844117\" (UID: \"11850c64def695e9bdc4024560affd76\") " pod="kube-system/kube-scheduler-cert-options-844117"
	Sep 16 11:24:35 cert-options-844117 kubelet[1530]: I0916 11:24:35.705207    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-data\" (UniqueName: \"kubernetes.io/host-path/ea3f84c29a8be9234267d8a27770c011-etcd-data\") pod \"etcd-cert-options-844117\" (UID: \"ea3f84c29a8be9234267d8a27770c011\") " pod="kube-system/etcd-cert-options-844117"
	Sep 16 11:24:35 cert-options-844117 kubelet[1530]: I0916 11:24:35.705226    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"k8s-certs\" (UniqueName: \"kubernetes.io/host-path/1cbda566f5c0b313c7ecb82521c3206e-k8s-certs\") pod \"kube-apiserver-cert-options-844117\" (UID: \"1cbda566f5c0b313c7ecb82521c3206e\") " pod="kube-system/kube-apiserver-cert-options-844117"
	Sep 16 11:24:35 cert-options-844117 kubelet[1530]: I0916 11:24:35.705248    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/host-path/8242b462d3299686c5c35d57108ec349-ca-certs\") pod \"kube-controller-manager-cert-options-844117\" (UID: \"8242b462d3299686c5c35d57108ec349\") " pod="kube-system/kube-controller-manager-cert-options-844117"
	Sep 16 11:24:35 cert-options-844117 kubelet[1530]: I0916 11:24:35.705266    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"flexvolume-dir\" (UniqueName: \"kubernetes.io/host-path/8242b462d3299686c5c35d57108ec349-flexvolume-dir\") pod \"kube-controller-manager-cert-options-844117\" (UID: \"8242b462d3299686c5c35d57108ec349\") " pod="kube-system/kube-controller-manager-cert-options-844117"
	Sep 16 11:24:35 cert-options-844117 kubelet[1530]: I0916 11:24:35.705289    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-share-ca-certificates\" (UniqueName: \"kubernetes.io/host-path/8242b462d3299686c5c35d57108ec349-usr-local-share-ca-certificates\") pod \"kube-controller-manager-cert-options-844117\" (UID: \"8242b462d3299686c5c35d57108ec349\") " pod="kube-system/kube-controller-manager-cert-options-844117"
	Sep 16 11:24:35 cert-options-844117 kubelet[1530]: I0916 11:24:35.705306    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-share-ca-certificates\" (UniqueName: \"kubernetes.io/host-path/1cbda566f5c0b313c7ecb82521c3206e-usr-share-ca-certificates\") pod \"kube-apiserver-cert-options-844117\" (UID: \"1cbda566f5c0b313c7ecb82521c3206e\") " pod="kube-system/kube-apiserver-cert-options-844117"
	Sep 16 11:24:35 cert-options-844117 kubelet[1530]: I0916 11:24:35.705324    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ca-certificates\" (UniqueName: \"kubernetes.io/host-path/8242b462d3299686c5c35d57108ec349-etc-ca-certificates\") pod \"kube-controller-manager-cert-options-844117\" (UID: \"8242b462d3299686c5c35d57108ec349\") " pod="kube-system/kube-controller-manager-cert-options-844117"
	Sep 16 11:24:35 cert-options-844117 kubelet[1530]: I0916 11:24:35.705344    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubeconfig\" (UniqueName: \"kubernetes.io/host-path/8242b462d3299686c5c35d57108ec349-kubeconfig\") pod \"kube-controller-manager-cert-options-844117\" (UID: \"8242b462d3299686c5c35d57108ec349\") " pod="kube-system/kube-controller-manager-cert-options-844117"
	Sep 16 11:24:35 cert-options-844117 kubelet[1530]: I0916 11:24:35.705360    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-certs\" (UniqueName: \"kubernetes.io/host-path/ea3f84c29a8be9234267d8a27770c011-etcd-certs\") pod \"etcd-cert-options-844117\" (UID: \"ea3f84c29a8be9234267d8a27770c011\") " pod="kube-system/etcd-cert-options-844117"
	Sep 16 11:24:36 cert-options-844117 kubelet[1530]: I0916 11:24:36.268249    1530 apiserver.go:52] "Watching apiserver"
	Sep 16 11:24:36 cert-options-844117 kubelet[1530]: I0916 11:24:36.301643    1530 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
	Sep 16 11:24:36 cert-options-844117 kubelet[1530]: E0916 11:24:36.354139    1530 kubelet.go:1915] "Failed creating a mirror pod for" err="pods \"kube-apiserver-cert-options-844117\" already exists" pod="kube-system/kube-apiserver-cert-options-844117"
	Sep 16 11:24:36 cert-options-844117 kubelet[1530]: E0916 11:24:36.355040    1530 kubelet.go:1915] "Failed creating a mirror pod for" err="pods \"etcd-cert-options-844117\" already exists" pod="kube-system/etcd-cert-options-844117"
	Sep 16 11:24:36 cert-options-844117 kubelet[1530]: I0916 11:24:36.444212    1530 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/etcd-cert-options-844117" podStartSLOduration=1.44417172 podStartE2EDuration="1.44417172s" podCreationTimestamp="2024-09-16 11:24:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:24:36.402858622 +0000 UTC m=+1.238285959" watchObservedRunningTime="2024-09-16 11:24:36.44417172 +0000 UTC m=+1.279599057"
	Sep 16 11:24:36 cert-options-844117 kubelet[1530]: I0916 11:24:36.456612    1530 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-apiserver-cert-options-844117" podStartSLOduration=1.456592579 podStartE2EDuration="1.456592579s" podCreationTimestamp="2024-09-16 11:24:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:24:36.444494811 +0000 UTC m=+1.279922198" watchObservedRunningTime="2024-09-16 11:24:36.456592579 +0000 UTC m=+1.292019917"
	Sep 16 11:24:36 cert-options-844117 kubelet[1530]: I0916 11:24:36.471569    1530 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-controller-manager-cert-options-844117" podStartSLOduration=1.471548587 podStartE2EDuration="1.471548587s" podCreationTimestamp="2024-09-16 11:24:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:24:36.456960692 +0000 UTC m=+1.292388054" watchObservedRunningTime="2024-09-16 11:24:36.471548587 +0000 UTC m=+1.306975924"
	Sep 16 11:24:36 cert-options-844117 kubelet[1530]: I0916 11:24:36.492894    1530 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-scheduler-cert-options-844117" podStartSLOduration=1.492872155 podStartE2EDuration="1.492872155s" podCreationTimestamp="2024-09-16 11:24:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:24:36.471968998 +0000 UTC m=+1.307396336" watchObservedRunningTime="2024-09-16 11:24:36.492872155 +0000 UTC m=+1.328299534"
	Sep 16 11:24:39 cert-options-844117 kubelet[1530]: I0916 11:24:39.779260    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kslmd\" (UniqueName: \"kubernetes.io/projected/f5adfb98-3837-4efc-bbf7-0d23e8569703-kube-api-access-kslmd\") pod \"storage-provisioner\" (UID: \"f5adfb98-3837-4efc-bbf7-0d23e8569703\") " pod="kube-system/storage-provisioner"
	Sep 16 11:24:39 cert-options-844117 kubelet[1530]: I0916 11:24:39.779321    1530 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/f5adfb98-3837-4efc-bbf7-0d23e8569703-tmp\") pod \"storage-provisioner\" (UID: \"f5adfb98-3837-4efc-bbf7-0d23e8569703\") " pod="kube-system/storage-provisioner"
	Sep 16 11:24:39 cert-options-844117 kubelet[1530]: E0916 11:24:39.891064    1530 projected.go:288] Couldn't get configMap kube-system/kube-root-ca.crt: configmap "kube-root-ca.crt" not found
	Sep 16 11:24:39 cert-options-844117 kubelet[1530]: E0916 11:24:39.891114    1530 projected.go:194] Error preparing data for projected volume kube-api-access-kslmd for pod kube-system/storage-provisioner: configmap "kube-root-ca.crt" not found
	Sep 16 11:24:39 cert-options-844117 kubelet[1530]: E0916 11:24:39.891198    1530 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f5adfb98-3837-4efc-bbf7-0d23e8569703-kube-api-access-kslmd podName:f5adfb98-3837-4efc-bbf7-0d23e8569703 nodeName:}" failed. No retries permitted until 2024-09-16 11:24:40.39117389 +0000 UTC m=+5.226601227 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-kslmd" (UniqueName: "kubernetes.io/projected/f5adfb98-3837-4efc-bbf7-0d23e8569703-kube-api-access-kslmd") pod "storage-provisioner" (UID: "f5adfb98-3837-4efc-bbf7-0d23e8569703") : configmap "kube-root-ca.crt" not found
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p cert-options-844117 -n cert-options-844117
helpers_test.go:261: (dbg) Run:  kubectl --context cert-options-844117 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context cert-options-844117 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (543.054µs)
helpers_test.go:263: kubectl --context cert-options-844117 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:175: Cleaning up "cert-options-844117" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p cert-options-844117
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p cert-options-844117: (2.113910501s)
--- FAIL: TestCertOptions (41.75s)

                                                
                                    
x
+
TestFunctional/serial/KubeContext (2.2s)

                                                
                                                
=== RUN   TestFunctional/serial/KubeContext
functional_test.go:681: (dbg) Run:  kubectl config current-context
functional_test.go:681: (dbg) Non-zero exit: kubectl config current-context: fork/exec /usr/local/bin/kubectl: exec format error (1.250659ms)
functional_test.go:683: failed to get current-context. args "kubectl config current-context" : fork/exec /usr/local/bin/kubectl: exec format error
functional_test.go:687: expected current-context = "functional-911502", but got *""*
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/serial/KubeContext]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-911502
helpers_test.go:235: (dbg) docker inspect functional-911502:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1",
	        "Created": "2024-09-16T10:47:14.597354828Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2085675,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:47:14.7319597Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hostname",
	        "HostsPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hosts",
	        "LogPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1-json.log",
	        "Name": "/functional-911502",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-911502:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-911502",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/merged",
	                "UpperDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/diff",
	                "WorkDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "functional-911502",
	                "Source": "/var/lib/docker/volumes/functional-911502/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-911502",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-911502",
	                "name.minikube.sigs.k8s.io": "functional-911502",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "44bb4c6f2ec0f0eef04adb8f886d0e0de7d31ae50612de741bed0ee945b2b75e",
	            "SandboxKey": "/var/run/docker/netns/44bb4c6f2ec0",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40592"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40593"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40596"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40594"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40595"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-911502": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "8c4428adf23c812318456ac17bea5953b33d7961994dfc84c0ff82a45764b662",
	                    "EndpointID": "8b3cc6f2c9f87b61b7e755d7ecd320ed6313887dfb3deab9f4e0858aa1c9fe80",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-911502",
	                        "9bf795605895"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-911502 -n functional-911502
helpers_test.go:244: <<< TestFunctional/serial/KubeContext FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/serial/KubeContext]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 logs -n 25: (1.344344937s)
helpers_test.go:252: TestFunctional/serial/KubeContext logs: 
-- stdout --
	
	==> Audit <==
	|------------|--------------------------------|-------------------|---------|---------|---------------------|---------------------|
	|  Command   |              Args              |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|------------|--------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| addons     | addons-451841 addons           | addons-451841     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|            | disable metrics-server         |                   |         |         |                     |                     |
	|            | --alsologtostderr -v=1         |                   |         |         |                     |                     |
	| stop       | -p addons-451841               | addons-451841     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	| addons     | enable dashboard -p            | addons-451841     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|            | addons-451841                  |                   |         |         |                     |                     |
	| addons     | disable dashboard -p           | addons-451841     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|            | addons-451841                  |                   |         |         |                     |                     |
	| addons     | disable gvisor -p              | addons-451841     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|            | addons-451841                  |                   |         |         |                     |                     |
	| delete     | -p addons-451841               | addons-451841     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	| start      | -p dockerenv-701218            | dockerenv-701218  | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:46 UTC |
	|            | --driver=docker                |                   |         |         |                     |                     |
	|            | --container-runtime=containerd |                   |         |         |                     |                     |
	| docker-env | --ssh-host --ssh-add -p        | dockerenv-701218  | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|            | dockerenv-701218               |                   |         |         |                     |                     |
	| delete     | -p dockerenv-701218            | dockerenv-701218  | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	| start      | -p nospam-826306 -n=1          | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|            | --memory=2250 --wait=false     |                   |         |         |                     |                     |
	|            | --log_dir=/tmp/nospam-826306   |                   |         |         |                     |                     |
	|            | --driver=docker                |                   |         |         |                     |                     |
	|            | --container-runtime=containerd |                   |         |         |                     |                     |
	| start      | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC |                     |
	|            | /tmp/nospam-826306 start       |                   |         |         |                     |                     |
	|            | --dry-run                      |                   |         |         |                     |                     |
	| start      | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC |                     |
	|            | /tmp/nospam-826306 start       |                   |         |         |                     |                     |
	|            | --dry-run                      |                   |         |         |                     |                     |
	| start      | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC |                     |
	|            | /tmp/nospam-826306 start       |                   |         |         |                     |                     |
	|            | --dry-run                      |                   |         |         |                     |                     |
	| pause      | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|            | /tmp/nospam-826306 pause       |                   |         |         |                     |                     |
	| pause      | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 pause       |                   |         |         |                     |                     |
	| pause      | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 pause       |                   |         |         |                     |                     |
	| unpause    | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 unpause     |                   |         |         |                     |                     |
	| unpause    | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 unpause     |                   |         |         |                     |                     |
	| unpause    | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 unpause     |                   |         |         |                     |                     |
	| stop       | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 stop        |                   |         |         |                     |                     |
	| stop       | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 stop        |                   |         |         |                     |                     |
	| stop       | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 stop        |                   |         |         |                     |                     |
	| delete     | -p nospam-826306               | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	| start      | -p functional-911502           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | --memory=4000                  |                   |         |         |                     |                     |
	|            | --apiserver-port=8441          |                   |         |         |                     |                     |
	|            | --wait=all --driver=docker     |                   |         |         |                     |                     |
	|            | --container-runtime=containerd |                   |         |         |                     |                     |
	| start      | -p functional-911502           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:48 UTC |
	|            | --alsologtostderr -v=8         |                   |         |         |                     |                     |
	|------------|--------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:47:58
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:47:58.785399 2088165 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:47:58.785661 2088165 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:47:58.785692 2088165 out.go:358] Setting ErrFile to fd 2...
	I0916 10:47:58.785711 2088165 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:47:58.786080 2088165 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:47:58.786528 2088165 out.go:352] Setting JSON to false
	I0916 10:47:58.787669 2088165 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":138621,"bootTime":1726345058,"procs":201,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:47:58.787775 2088165 start.go:139] virtualization:  
	I0916 10:47:58.790932 2088165 out.go:177] * [functional-911502] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:47:58.793408 2088165 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:47:58.793528 2088165 notify.go:220] Checking for updates...
	I0916 10:47:58.797761 2088165 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:47:58.799713 2088165 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:47:58.801637 2088165 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:47:58.804187 2088165 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:47:58.806963 2088165 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:47:58.809598 2088165 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:47:58.809697 2088165 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:47:58.832380 2088165 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:47:58.832507 2088165 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:47:58.895646 2088165 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:52 OomKillDisable:true NGoroutines:71 SystemTime:2024-09-16 10:47:58.885470517 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:47:58.895754 2088165 docker.go:318] overlay module found
	I0916 10:47:58.899884 2088165 out.go:177] * Using the docker driver based on existing profile
	I0916 10:47:58.902322 2088165 start.go:297] selected driver: docker
	I0916 10:47:58.902344 2088165 start.go:901] validating driver "docker" against &{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountU
ID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:47:58.902463 2088165 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:47:58.902578 2088165 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:47:58.953781 2088165 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:52 OomKillDisable:true NGoroutines:71 SystemTime:2024-09-16 10:47:58.94381861 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarc
h64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerError
s:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:47:58.954212 2088165 cni.go:84] Creating CNI manager for ""
	I0916 10:47:58.954285 2088165 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:47:58.954337 2088165 start.go:340] cluster config:
	{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local Containe
rRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQe
muFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:47:58.956566 2088165 out.go:177] * Starting "functional-911502" primary control-plane node in "functional-911502" cluster
	I0916 10:47:58.958887 2088165 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:47:58.961214 2088165 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:47:58.963438 2088165 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:47:58.963795 2088165 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:47:58.963796 2088165 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:47:58.963825 2088165 cache.go:56] Caching tarball of preloaded images
	I0916 10:47:58.963907 2088165 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:47:58.963916 2088165 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:47:58.964022 2088165 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/config.json ...
	W0916 10:47:58.982466 2088165 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:47:58.982488 2088165 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:47:58.982561 2088165 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:47:58.982583 2088165 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:47:58.982591 2088165 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:47:58.982599 2088165 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:47:58.982609 2088165 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:47:58.983989 2088165 image.go:273] response: 
	I0916 10:47:59.121036 2088165 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:47:59.121078 2088165 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:47:59.121125 2088165 start.go:360] acquireMachinesLock for functional-911502: {Name:mk182321dd921c9bc14d73d2af41d001efc879fd Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:47:59.121202 2088165 start.go:364] duration metric: took 53.079µs to acquireMachinesLock for "functional-911502"
	I0916 10:47:59.121226 2088165 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:47:59.121232 2088165 fix.go:54] fixHost starting: 
	I0916 10:47:59.121521 2088165 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:47:59.141783 2088165 fix.go:112] recreateIfNeeded on functional-911502: state=Running err=<nil>
	W0916 10:47:59.141816 2088165 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:47:59.144721 2088165 out.go:177] * Updating the running docker "functional-911502" container ...
	I0916 10:47:59.146980 2088165 machine.go:93] provisionDockerMachine start ...
	I0916 10:47:59.147079 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:47:59.164690 2088165 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:59.164967 2088165 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:47:59.164983 2088165 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:47:59.302200 2088165 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-911502
	
	I0916 10:47:59.302234 2088165 ubuntu.go:169] provisioning hostname "functional-911502"
	I0916 10:47:59.302315 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:47:59.319540 2088165 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:59.319848 2088165 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:47:59.319864 2088165 main.go:141] libmachine: About to run SSH command:
	sudo hostname functional-911502 && echo "functional-911502" | sudo tee /etc/hostname
	I0916 10:47:59.483029 2088165 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-911502
	
	I0916 10:47:59.483111 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:47:59.501717 2088165 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:59.501970 2088165 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:47:59.501994 2088165 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sfunctional-911502' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 functional-911502/g' /etc/hosts;
				else 
					echo '127.0.1.1 functional-911502' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:47:59.638820 2088165 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:47:59.638848 2088165 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:47:59.638878 2088165 ubuntu.go:177] setting up certificates
	I0916 10:47:59.638887 2088165 provision.go:84] configureAuth start
	I0916 10:47:59.638945 2088165 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-911502
	I0916 10:47:59.654991 2088165 provision.go:143] copyHostCerts
	I0916 10:47:59.655034 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:47:59.655068 2088165 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:47:59.655080 2088165 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:47:59.655162 2088165 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:47:59.655296 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:47:59.655319 2088165 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:47:59.655328 2088165 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:47:59.655365 2088165 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:47:59.655419 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:47:59.655439 2088165 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:47:59.655446 2088165 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:47:59.655472 2088165 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:47:59.655531 2088165 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.functional-911502 san=[127.0.0.1 192.168.49.2 functional-911502 localhost minikube]
	I0916 10:48:00.409717 2088165 provision.go:177] copyRemoteCerts
	I0916 10:48:00.409931 2088165 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:48:00.410014 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:00.446778 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:00.562791 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:48:00.562870 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:48:00.594618 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:48:00.594736 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1220 bytes)
	I0916 10:48:00.624272 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:48:00.624360 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:48:00.651947 2088165 provision.go:87] duration metric: took 1.013043969s to configureAuth
	I0916 10:48:00.651976 2088165 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:48:00.652177 2088165 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:48:00.652190 2088165 machine.go:96] duration metric: took 1.505191275s to provisionDockerMachine
	I0916 10:48:00.652199 2088165 start.go:293] postStartSetup for "functional-911502" (driver="docker")
	I0916 10:48:00.652211 2088165 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:48:00.652275 2088165 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:48:00.652323 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:00.669602 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:00.768149 2088165 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:48:00.771695 2088165 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 10:48:00.771754 2088165 command_runner.go:130] > NAME="Ubuntu"
	I0916 10:48:00.771777 2088165 command_runner.go:130] > VERSION_ID="22.04"
	I0916 10:48:00.771789 2088165 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 10:48:00.771795 2088165 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 10:48:00.771799 2088165 command_runner.go:130] > ID=ubuntu
	I0916 10:48:00.771803 2088165 command_runner.go:130] > ID_LIKE=debian
	I0916 10:48:00.771807 2088165 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 10:48:00.771812 2088165 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 10:48:00.771818 2088165 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 10:48:00.771827 2088165 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 10:48:00.771832 2088165 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 10:48:00.771885 2088165 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:48:00.771925 2088165 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:48:00.771940 2088165 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:48:00.771947 2088165 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:48:00.771969 2088165 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:48:00.772032 2088165 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:48:00.772125 2088165 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:48:00.772139 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:48:00.772219 2088165 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/test/nested/copy/2063326/hosts -> hosts in /etc/test/nested/copy/2063326
	I0916 10:48:00.772223 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/test/nested/copy/2063326/hosts -> /etc/test/nested/copy/2063326/hosts
	I0916 10:48:00.772270 2088165 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs /etc/test/nested/copy/2063326
	I0916 10:48:00.781368 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:48:00.806768 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/test/nested/copy/2063326/hosts --> /etc/test/nested/copy/2063326/hosts (40 bytes)
	I0916 10:48:00.831874 2088165 start.go:296] duration metric: took 179.657184ms for postStartSetup
	I0916 10:48:00.832033 2088165 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:48:00.832109 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:00.849667 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:00.947508 2088165 command_runner.go:130] > 21%
	I0916 10:48:00.948282 2088165 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:48:00.953002 2088165 command_runner.go:130] > 155G
	I0916 10:48:00.953615 2088165 fix.go:56] duration metric: took 1.832376025s for fixHost
	I0916 10:48:00.953635 2088165 start.go:83] releasing machines lock for "functional-911502", held for 1.832420973s
	I0916 10:48:00.953725 2088165 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-911502
	I0916 10:48:00.971275 2088165 ssh_runner.go:195] Run: cat /version.json
	I0916 10:48:00.971332 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:00.971591 2088165 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:48:00.971655 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:00.991183 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:01.008321 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:01.090086 2088165 command_runner.go:130] > {"iso_version": "v1.34.0-1726281733-19643", "kicbase_version": "v0.0.45-1726358845-19644", "minikube_version": "v1.34.0", "commit": "f890713149c79cf50e25c13e6a5c0470aa0f0450"}
	I0916 10:48:01.090258 2088165 ssh_runner.go:195] Run: systemctl --version
	I0916 10:48:01.217382 2088165 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 10:48:01.220858 2088165 command_runner.go:130] > systemd 249 (249.11-0ubuntu3.12)
	I0916 10:48:01.220957 2088165 command_runner.go:130] > +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified
	I0916 10:48:01.221042 2088165 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:48:01.225330 2088165 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 10:48:01.225410 2088165 command_runner.go:130] >   Size: 78        	Blocks: 8          IO Block: 4096   regular file
	I0916 10:48:01.225431 2088165 command_runner.go:130] > Device: 3ch/60d	Inode: 1324618     Links: 1
	I0916 10:48:01.225445 2088165 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 10:48:01.225451 2088165 command_runner.go:130] > Access: 2024-09-16 10:47:17.590968226 +0000
	I0916 10:48:01.225457 2088165 command_runner.go:130] > Modify: 2024-09-16 10:47:17.562968404 +0000
	I0916 10:48:01.225475 2088165 command_runner.go:130] > Change: 2024-09-16 10:47:17.562968404 +0000
	I0916 10:48:01.225491 2088165 command_runner.go:130] >  Birth: 2024-09-16 10:47:17.562968404 +0000
	I0916 10:48:01.225749 2088165 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:48:01.245017 2088165 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:48:01.245154 2088165 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:48:01.254936 2088165 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:48:01.254965 2088165 start.go:495] detecting cgroup driver to use...
	I0916 10:48:01.255025 2088165 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:48:01.255096 2088165 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:48:01.268726 2088165 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:48:01.281208 2088165 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:48:01.281273 2088165 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:48:01.295687 2088165 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:48:01.308064 2088165 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:48:01.417725 2088165 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:48:01.540113 2088165 docker.go:233] disabling docker service ...
	I0916 10:48:01.540218 2088165 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:48:01.554507 2088165 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:48:01.567763 2088165 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:48:01.684493 2088165 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:48:01.820121 2088165 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:48:01.834026 2088165 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:48:01.852747 2088165 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0916 10:48:01.854272 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:48:01.866074 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:48:01.877013 2088165 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:48:01.877089 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:48:01.887138 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:48:01.898151 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:48:01.908730 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:48:01.919968 2088165 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:48:01.929885 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:48:01.940722 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:48:01.951693 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:48:01.962633 2088165 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:48:01.971719 2088165 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 10:48:01.973238 2088165 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:48:01.983217 2088165 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:02.116216 2088165 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:48:02.423701 2088165 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:48:02.423774 2088165 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:48:02.427450 2088165 command_runner.go:130] >   File: /run/containerd/containerd.sock
	I0916 10:48:02.427474 2088165 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 10:48:02.427487 2088165 command_runner.go:130] > Device: 45h/69d	Inode: 620         Links: 1
	I0916 10:48:02.427494 2088165 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 10:48:02.427502 2088165 command_runner.go:130] > Access: 2024-09-16 10:48:02.406683973 +0000
	I0916 10:48:02.427507 2088165 command_runner.go:130] > Modify: 2024-09-16 10:48:02.314684556 +0000
	I0916 10:48:02.427513 2088165 command_runner.go:130] > Change: 2024-09-16 10:48:02.314684556 +0000
	I0916 10:48:02.427517 2088165 command_runner.go:130] >  Birth: -
	I0916 10:48:02.427535 2088165 start.go:563] Will wait 60s for crictl version
	I0916 10:48:02.427599 2088165 ssh_runner.go:195] Run: which crictl
	I0916 10:48:02.430997 2088165 command_runner.go:130] > /usr/bin/crictl
	I0916 10:48:02.431080 2088165 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:48:02.471685 2088165 command_runner.go:130] > Version:  0.1.0
	I0916 10:48:02.471711 2088165 command_runner.go:130] > RuntimeName:  containerd
	I0916 10:48:02.471725 2088165 command_runner.go:130] > RuntimeVersion:  1.7.22
	I0916 10:48:02.471730 2088165 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 10:48:02.474385 2088165 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:48:02.474457 2088165 ssh_runner.go:195] Run: containerd --version
	I0916 10:48:02.501051 2088165 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 10:48:02.502956 2088165 ssh_runner.go:195] Run: containerd --version
	I0916 10:48:02.529886 2088165 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 10:48:02.535085 2088165 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:48:02.537930 2088165 cli_runner.go:164] Run: docker network inspect functional-911502 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:48:02.552276 2088165 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:48:02.556374 2088165 command_runner.go:130] > 192.168.49.1	host.minikube.internal
	I0916 10:48:02.556587 2088165 kubeadm.go:883] updating cluster {Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA API
ServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryM
irror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:48:02.556724 2088165 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:48:02.556849 2088165 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:02.594114 2088165 command_runner.go:130] > {
	I0916 10:48:02.594134 2088165 command_runner.go:130] >   "images": [
	I0916 10:48:02.594141 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594151 2088165 command_runner.go:130] >       "id": "sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 10:48:02.594156 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594163 2088165 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 10:48:02.594167 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594171 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594180 2088165 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 10:48:02.594183 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594187 2088165 command_runner.go:130] >       "size": "33309097",
	I0916 10:48:02.594195 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.594198 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594204 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594208 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594237 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594240 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594249 2088165 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 10:48:02.594255 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594260 2088165 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 10:48:02.594266 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594270 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594279 2088165 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 10:48:02.594282 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594286 2088165 command_runner.go:130] >       "size": "8034419",
	I0916 10:48:02.594289 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.594294 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594298 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594305 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594309 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594312 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594322 2088165 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 10:48:02.594326 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594335 2088165 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 10:48:02.594346 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594350 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594358 2088165 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 10:48:02.594362 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594366 2088165 command_runner.go:130] >       "size": "16948420",
	I0916 10:48:02.594372 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.594376 2088165 command_runner.go:130] >       "username": "nonroot",
	I0916 10:48:02.594382 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594388 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594391 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594397 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594403 2088165 command_runner.go:130] >       "id": "sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 10:48:02.594411 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594417 2088165 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 10:48:02.594420 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594425 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594435 2088165 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a"
	I0916 10:48:02.594441 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594445 2088165 command_runner.go:130] >       "size": "66535646",
	I0916 10:48:02.594449 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.594453 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.594456 2088165 command_runner.go:130] >       },
	I0916 10:48:02.594460 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594464 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594470 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594474 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594479 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594487 2088165 command_runner.go:130] >       "id": "sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 10:48:02.594492 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594498 2088165 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 10:48:02.594504 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594508 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594519 2088165 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb"
	I0916 10:48:02.594523 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594529 2088165 command_runner.go:130] >       "size": "25687130",
	I0916 10:48:02.594533 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.594537 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.594541 2088165 command_runner.go:130] >       },
	I0916 10:48:02.594545 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594551 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594555 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594563 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594566 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594573 2088165 command_runner.go:130] >       "id": "sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 10:48:02.594579 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594586 2088165 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 10:48:02.594589 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594593 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594602 2088165 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1"
	I0916 10:48:02.594608 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594612 2088165 command_runner.go:130] >       "size": "23948670",
	I0916 10:48:02.594616 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.594619 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.594623 2088165 command_runner.go:130] >       },
	I0916 10:48:02.594626 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594630 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594633 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594636 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594639 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594646 2088165 command_runner.go:130] >       "id": "sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 10:48:02.594650 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594655 2088165 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 10:48:02.594660 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594664 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594708 2088165 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44"
	I0916 10:48:02.594716 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594720 2088165 command_runner.go:130] >       "size": "26756812",
	I0916 10:48:02.594724 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.594727 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594731 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594735 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594738 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594741 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594749 2088165 command_runner.go:130] >       "id": "sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 10:48:02.594755 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594761 2088165 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 10:48:02.594764 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594768 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594780 2088165 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 10:48:02.594785 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594789 2088165 command_runner.go:130] >       "size": "18507674",
	I0916 10:48:02.594793 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.594797 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.594803 2088165 command_runner.go:130] >       },
	I0916 10:48:02.594806 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594813 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594817 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594820 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594824 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594831 2088165 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 10:48:02.594837 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594842 2088165 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 10:48:02.594845 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594851 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594859 2088165 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 10:48:02.594865 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594869 2088165 command_runner.go:130] >       "size": "267933",
	I0916 10:48:02.594873 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.594877 2088165 command_runner.go:130] >         "value": "65535"
	I0916 10:48:02.594882 2088165 command_runner.go:130] >       },
	I0916 10:48:02.594886 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594892 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594896 2088165 command_runner.go:130] >       "pinned": true
	I0916 10:48:02.594899 2088165 command_runner.go:130] >     }
	I0916 10:48:02.594903 2088165 command_runner.go:130] >   ]
	I0916 10:48:02.594908 2088165 command_runner.go:130] > }
	I0916 10:48:02.595084 2088165 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:48:02.595096 2088165 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:48:02.595159 2088165 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:02.632598 2088165 command_runner.go:130] > {
	I0916 10:48:02.632619 2088165 command_runner.go:130] >   "images": [
	I0916 10:48:02.632625 2088165 command_runner.go:130] >     {
	I0916 10:48:02.632635 2088165 command_runner.go:130] >       "id": "sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 10:48:02.632640 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.632649 2088165 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 10:48:02.632653 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632657 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.632666 2088165 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 10:48:02.632669 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632673 2088165 command_runner.go:130] >       "size": "33309097",
	I0916 10:48:02.632677 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.632681 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.632684 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.632688 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.632692 2088165 command_runner.go:130] >     },
	I0916 10:48:02.632695 2088165 command_runner.go:130] >     {
	I0916 10:48:02.632712 2088165 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 10:48:02.632722 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.632765 2088165 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 10:48:02.632780 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632784 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.632798 2088165 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 10:48:02.632802 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632806 2088165 command_runner.go:130] >       "size": "8034419",
	I0916 10:48:02.632809 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.632813 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.632817 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.632820 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.632824 2088165 command_runner.go:130] >     },
	I0916 10:48:02.632827 2088165 command_runner.go:130] >     {
	I0916 10:48:02.632834 2088165 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 10:48:02.632837 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.632844 2088165 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 10:48:02.632848 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632851 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.632871 2088165 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 10:48:02.632881 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632887 2088165 command_runner.go:130] >       "size": "16948420",
	I0916 10:48:02.632891 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.632895 2088165 command_runner.go:130] >       "username": "nonroot",
	I0916 10:48:02.632898 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.632902 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.632905 2088165 command_runner.go:130] >     },
	I0916 10:48:02.632909 2088165 command_runner.go:130] >     {
	I0916 10:48:02.632915 2088165 command_runner.go:130] >       "id": "sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 10:48:02.632919 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.632923 2088165 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 10:48:02.632930 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632934 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.632942 2088165 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a"
	I0916 10:48:02.632945 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632949 2088165 command_runner.go:130] >       "size": "66535646",
	I0916 10:48:02.632952 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.632956 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.632960 2088165 command_runner.go:130] >       },
	I0916 10:48:02.632964 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.632975 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.632982 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.632985 2088165 command_runner.go:130] >     },
	I0916 10:48:02.632988 2088165 command_runner.go:130] >     {
	I0916 10:48:02.632995 2088165 command_runner.go:130] >       "id": "sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 10:48:02.632998 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.633003 2088165 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 10:48:02.633006 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633010 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.633020 2088165 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb"
	I0916 10:48:02.633030 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633034 2088165 command_runner.go:130] >       "size": "25687130",
	I0916 10:48:02.633037 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.633041 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.633044 2088165 command_runner.go:130] >       },
	I0916 10:48:02.633048 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.633052 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.633058 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.633061 2088165 command_runner.go:130] >     },
	I0916 10:48:02.633064 2088165 command_runner.go:130] >     {
	I0916 10:48:02.633071 2088165 command_runner.go:130] >       "id": "sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 10:48:02.633074 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.633080 2088165 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 10:48:02.633083 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633086 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.633094 2088165 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1"
	I0916 10:48:02.633098 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633102 2088165 command_runner.go:130] >       "size": "23948670",
	I0916 10:48:02.633105 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.633108 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.633112 2088165 command_runner.go:130] >       },
	I0916 10:48:02.633116 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.633120 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.633123 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.633126 2088165 command_runner.go:130] >     },
	I0916 10:48:02.633129 2088165 command_runner.go:130] >     {
	I0916 10:48:02.633135 2088165 command_runner.go:130] >       "id": "sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 10:48:02.633139 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.633144 2088165 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 10:48:02.633148 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633152 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.633160 2088165 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44"
	I0916 10:48:02.633163 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633167 2088165 command_runner.go:130] >       "size": "26756812",
	I0916 10:48:02.633170 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.633174 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.633177 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.633181 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.633184 2088165 command_runner.go:130] >     },
	I0916 10:48:02.633186 2088165 command_runner.go:130] >     {
	I0916 10:48:02.633193 2088165 command_runner.go:130] >       "id": "sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 10:48:02.633196 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.633201 2088165 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 10:48:02.633204 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633208 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.633222 2088165 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 10:48:02.633225 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633229 2088165 command_runner.go:130] >       "size": "18507674",
	I0916 10:48:02.633232 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.633236 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.633239 2088165 command_runner.go:130] >       },
	I0916 10:48:02.633242 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.633246 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.633249 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.633253 2088165 command_runner.go:130] >     },
	I0916 10:48:02.633256 2088165 command_runner.go:130] >     {
	I0916 10:48:02.633263 2088165 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 10:48:02.633266 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.633272 2088165 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 10:48:02.633275 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633279 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.633286 2088165 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 10:48:02.633289 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633293 2088165 command_runner.go:130] >       "size": "267933",
	I0916 10:48:02.633296 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.633300 2088165 command_runner.go:130] >         "value": "65535"
	I0916 10:48:02.633303 2088165 command_runner.go:130] >       },
	I0916 10:48:02.633308 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.633311 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.633315 2088165 command_runner.go:130] >       "pinned": true
	I0916 10:48:02.633317 2088165 command_runner.go:130] >     }
	I0916 10:48:02.633320 2088165 command_runner.go:130] >   ]
	I0916 10:48:02.633323 2088165 command_runner.go:130] > }
	I0916 10:48:02.635227 2088165 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:48:02.635252 2088165 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:48:02.635266 2088165 kubeadm.go:934] updating node { 192.168.49.2 8441 v1.31.1 containerd true true} ...
	I0916 10:48:02.635414 2088165 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=functional-911502 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:48:02.635484 2088165 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:48:02.669886 2088165 command_runner.go:130] > {
	I0916 10:48:02.669909 2088165 command_runner.go:130] >   "status": {
	I0916 10:48:02.669915 2088165 command_runner.go:130] >     "conditions": [
	I0916 10:48:02.669919 2088165 command_runner.go:130] >       {
	I0916 10:48:02.669925 2088165 command_runner.go:130] >         "type": "RuntimeReady",
	I0916 10:48:02.669929 2088165 command_runner.go:130] >         "status": true,
	I0916 10:48:02.669935 2088165 command_runner.go:130] >         "reason": "",
	I0916 10:48:02.669939 2088165 command_runner.go:130] >         "message": ""
	I0916 10:48:02.669943 2088165 command_runner.go:130] >       },
	I0916 10:48:02.669949 2088165 command_runner.go:130] >       {
	I0916 10:48:02.669956 2088165 command_runner.go:130] >         "type": "NetworkReady",
	I0916 10:48:02.669964 2088165 command_runner.go:130] >         "status": true,
	I0916 10:48:02.669968 2088165 command_runner.go:130] >         "reason": "",
	I0916 10:48:02.669974 2088165 command_runner.go:130] >         "message": ""
	I0916 10:48:02.669978 2088165 command_runner.go:130] >       },
	I0916 10:48:02.669981 2088165 command_runner.go:130] >       {
	I0916 10:48:02.669987 2088165 command_runner.go:130] >         "type": "ContainerdHasNoDeprecationWarnings",
	I0916 10:48:02.669993 2088165 command_runner.go:130] >         "status": true,
	I0916 10:48:02.669997 2088165 command_runner.go:130] >         "reason": "",
	I0916 10:48:02.670001 2088165 command_runner.go:130] >         "message": ""
	I0916 10:48:02.670007 2088165 command_runner.go:130] >       }
	I0916 10:48:02.670010 2088165 command_runner.go:130] >     ]
	I0916 10:48:02.670013 2088165 command_runner.go:130] >   },
	I0916 10:48:02.670017 2088165 command_runner.go:130] >   "cniconfig": {
	I0916 10:48:02.670021 2088165 command_runner.go:130] >     "PluginDirs": [
	I0916 10:48:02.670025 2088165 command_runner.go:130] >       "/opt/cni/bin"
	I0916 10:48:02.670030 2088165 command_runner.go:130] >     ],
	I0916 10:48:02.670036 2088165 command_runner.go:130] >     "PluginConfDir": "/etc/cni/net.d",
	I0916 10:48:02.670040 2088165 command_runner.go:130] >     "PluginMaxConfNum": 1,
	I0916 10:48:02.670046 2088165 command_runner.go:130] >     "Prefix": "eth",
	I0916 10:48:02.670054 2088165 command_runner.go:130] >     "Networks": [
	I0916 10:48:02.670060 2088165 command_runner.go:130] >       {
	I0916 10:48:02.670064 2088165 command_runner.go:130] >         "Config": {
	I0916 10:48:02.670070 2088165 command_runner.go:130] >           "Name": "cni-loopback",
	I0916 10:48:02.670074 2088165 command_runner.go:130] >           "CNIVersion": "0.3.1",
	I0916 10:48:02.670080 2088165 command_runner.go:130] >           "Plugins": [
	I0916 10:48:02.670084 2088165 command_runner.go:130] >             {
	I0916 10:48:02.670097 2088165 command_runner.go:130] >               "Network": {
	I0916 10:48:02.670102 2088165 command_runner.go:130] >                 "type": "loopback",
	I0916 10:48:02.670105 2088165 command_runner.go:130] >                 "ipam": {},
	I0916 10:48:02.670109 2088165 command_runner.go:130] >                 "dns": {}
	I0916 10:48:02.670113 2088165 command_runner.go:130] >               },
	I0916 10:48:02.670121 2088165 command_runner.go:130] >               "Source": "{\"type\":\"loopback\"}"
	I0916 10:48:02.670124 2088165 command_runner.go:130] >             }
	I0916 10:48:02.670128 2088165 command_runner.go:130] >           ],
	I0916 10:48:02.670138 2088165 command_runner.go:130] >           "Source": "{\n\"cniVersion\": \"0.3.1\",\n\"name\": \"cni-loopback\",\n\"plugins\": [{\n  \"type\": \"loopback\"\n}]\n}"
	I0916 10:48:02.670144 2088165 command_runner.go:130] >         },
	I0916 10:48:02.670148 2088165 command_runner.go:130] >         "IFName": "lo"
	I0916 10:48:02.670151 2088165 command_runner.go:130] >       },
	I0916 10:48:02.670154 2088165 command_runner.go:130] >       {
	I0916 10:48:02.670159 2088165 command_runner.go:130] >         "Config": {
	I0916 10:48:02.670164 2088165 command_runner.go:130] >           "Name": "kindnet",
	I0916 10:48:02.670170 2088165 command_runner.go:130] >           "CNIVersion": "0.3.1",
	I0916 10:48:02.670176 2088165 command_runner.go:130] >           "Plugins": [
	I0916 10:48:02.670179 2088165 command_runner.go:130] >             {
	I0916 10:48:02.670183 2088165 command_runner.go:130] >               "Network": {
	I0916 10:48:02.670187 2088165 command_runner.go:130] >                 "type": "ptp",
	I0916 10:48:02.670191 2088165 command_runner.go:130] >                 "ipam": {
	I0916 10:48:02.670196 2088165 command_runner.go:130] >                   "type": "host-local"
	I0916 10:48:02.670202 2088165 command_runner.go:130] >                 },
	I0916 10:48:02.670227 2088165 command_runner.go:130] >                 "dns": {}
	I0916 10:48:02.670235 2088165 command_runner.go:130] >               },
	I0916 10:48:02.670252 2088165 command_runner.go:130] >               "Source": "{\"ipMasq\":false,\"ipam\":{\"dataDir\":\"/run/cni-ipam-state\",\"ranges\":[[{\"subnet\":\"10.244.0.0/24\"}]],\"routes\":[{\"dst\":\"0.0.0.0/0\"}],\"type\":\"host-local\"},\"mtu\":1500,\"type\":\"ptp\"}"
	I0916 10:48:02.670260 2088165 command_runner.go:130] >             },
	I0916 10:48:02.670265 2088165 command_runner.go:130] >             {
	I0916 10:48:02.670269 2088165 command_runner.go:130] >               "Network": {
	I0916 10:48:02.670274 2088165 command_runner.go:130] >                 "type": "portmap",
	I0916 10:48:02.670280 2088165 command_runner.go:130] >                 "capabilities": {
	I0916 10:48:02.670284 2088165 command_runner.go:130] >                   "portMappings": true
	I0916 10:48:02.670288 2088165 command_runner.go:130] >                 },
	I0916 10:48:02.670291 2088165 command_runner.go:130] >                 "ipam": {},
	I0916 10:48:02.670295 2088165 command_runner.go:130] >                 "dns": {}
	I0916 10:48:02.670301 2088165 command_runner.go:130] >               },
	I0916 10:48:02.670308 2088165 command_runner.go:130] >               "Source": "{\"capabilities\":{\"portMappings\":true},\"type\":\"portmap\"}"
	I0916 10:48:02.670314 2088165 command_runner.go:130] >             }
	I0916 10:48:02.670318 2088165 command_runner.go:130] >           ],
	I0916 10:48:02.670350 2088165 command_runner.go:130] >           "Source": "\n{\n\t\"cniVersion\": \"0.3.1\",\n\t\"name\": \"kindnet\",\n\t\"plugins\": [\n\t{\n\t\t\"type\": \"ptp\",\n\t\t\"ipMasq\": false,\n\t\t\"ipam\": {\n\t\t\t\"type\": \"host-local\",\n\t\t\t\"dataDir\": \"/run/cni-ipam-state\",\n\t\t\t\"routes\": [\n\t\t\t\t\n\t\t\t\t\n\t\t\t\t{ \"dst\": \"0.0.0.0/0\" }\n\t\t\t],\n\t\t\t\"ranges\": [\n\t\t\t\t\n\t\t\t\t\n\t\t\t\t[ { \"subnet\": \"10.244.0.0/24\" } ]\n\t\t\t]\n\t\t}\n\t\t,\n\t\t\"mtu\": 1500\n\t\t\n\t},\n\t{\n\t\t\"type\": \"portmap\",\n\t\t\"capabilities\": {\n\t\t\t\"portMappings\": true\n\t\t}\n\t}\n\t]\n}\n"
	I0916 10:48:02.670358 2088165 command_runner.go:130] >         },
	I0916 10:48:02.670362 2088165 command_runner.go:130] >         "IFName": "eth0"
	I0916 10:48:02.670365 2088165 command_runner.go:130] >       }
	I0916 10:48:02.670368 2088165 command_runner.go:130] >     ]
	I0916 10:48:02.670371 2088165 command_runner.go:130] >   },
	I0916 10:48:02.670375 2088165 command_runner.go:130] >   "config": {
	I0916 10:48:02.670380 2088165 command_runner.go:130] >     "containerd": {
	I0916 10:48:02.670386 2088165 command_runner.go:130] >       "snapshotter": "overlayfs",
	I0916 10:48:02.670391 2088165 command_runner.go:130] >       "defaultRuntimeName": "runc",
	I0916 10:48:02.670396 2088165 command_runner.go:130] >       "defaultRuntime": {
	I0916 10:48:02.670400 2088165 command_runner.go:130] >         "runtimeType": "",
	I0916 10:48:02.670404 2088165 command_runner.go:130] >         "runtimePath": "",
	I0916 10:48:02.670416 2088165 command_runner.go:130] >         "runtimeEngine": "",
	I0916 10:48:02.670420 2088165 command_runner.go:130] >         "PodAnnotations": null,
	I0916 10:48:02.670424 2088165 command_runner.go:130] >         "ContainerAnnotations": null,
	I0916 10:48:02.670428 2088165 command_runner.go:130] >         "runtimeRoot": "",
	I0916 10:48:02.670433 2088165 command_runner.go:130] >         "options": null,
	I0916 10:48:02.670441 2088165 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0916 10:48:02.670447 2088165 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0916 10:48:02.670455 2088165 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0916 10:48:02.670459 2088165 command_runner.go:130] >         "cniConfDir": "",
	I0916 10:48:02.670463 2088165 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0916 10:48:02.670469 2088165 command_runner.go:130] >         "snapshotter": "",
	I0916 10:48:02.670473 2088165 command_runner.go:130] >         "sandboxMode": ""
	I0916 10:48:02.670478 2088165 command_runner.go:130] >       },
	I0916 10:48:02.670485 2088165 command_runner.go:130] >       "untrustedWorkloadRuntime": {
	I0916 10:48:02.670499 2088165 command_runner.go:130] >         "runtimeType": "",
	I0916 10:48:02.670503 2088165 command_runner.go:130] >         "runtimePath": "",
	I0916 10:48:02.670507 2088165 command_runner.go:130] >         "runtimeEngine": "",
	I0916 10:48:02.670512 2088165 command_runner.go:130] >         "PodAnnotations": null,
	I0916 10:48:02.670522 2088165 command_runner.go:130] >         "ContainerAnnotations": null,
	I0916 10:48:02.670526 2088165 command_runner.go:130] >         "runtimeRoot": "",
	I0916 10:48:02.670530 2088165 command_runner.go:130] >         "options": null,
	I0916 10:48:02.670535 2088165 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0916 10:48:02.670542 2088165 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0916 10:48:02.670546 2088165 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0916 10:48:02.670551 2088165 command_runner.go:130] >         "cniConfDir": "",
	I0916 10:48:02.670556 2088165 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0916 10:48:02.670560 2088165 command_runner.go:130] >         "snapshotter": "",
	I0916 10:48:02.670566 2088165 command_runner.go:130] >         "sandboxMode": ""
	I0916 10:48:02.670569 2088165 command_runner.go:130] >       },
	I0916 10:48:02.670573 2088165 command_runner.go:130] >       "runtimes": {
	I0916 10:48:02.670579 2088165 command_runner.go:130] >         "runc": {
	I0916 10:48:02.670584 2088165 command_runner.go:130] >           "runtimeType": "io.containerd.runc.v2",
	I0916 10:48:02.670596 2088165 command_runner.go:130] >           "runtimePath": "",
	I0916 10:48:02.670601 2088165 command_runner.go:130] >           "runtimeEngine": "",
	I0916 10:48:02.670606 2088165 command_runner.go:130] >           "PodAnnotations": null,
	I0916 10:48:02.670610 2088165 command_runner.go:130] >           "ContainerAnnotations": null,
	I0916 10:48:02.670615 2088165 command_runner.go:130] >           "runtimeRoot": "",
	I0916 10:48:02.670621 2088165 command_runner.go:130] >           "options": {
	I0916 10:48:02.670626 2088165 command_runner.go:130] >             "SystemdCgroup": false
	I0916 10:48:02.670629 2088165 command_runner.go:130] >           },
	I0916 10:48:02.670642 2088165 command_runner.go:130] >           "privileged_without_host_devices": false,
	I0916 10:48:02.670651 2088165 command_runner.go:130] >           "privileged_without_host_devices_all_devices_allowed": false,
	I0916 10:48:02.670655 2088165 command_runner.go:130] >           "baseRuntimeSpec": "",
	I0916 10:48:02.670659 2088165 command_runner.go:130] >           "cniConfDir": "",
	I0916 10:48:02.670663 2088165 command_runner.go:130] >           "cniMaxConfNum": 0,
	I0916 10:48:02.670699 2088165 command_runner.go:130] >           "snapshotter": "",
	I0916 10:48:02.670704 2088165 command_runner.go:130] >           "sandboxMode": "podsandbox"
	I0916 10:48:02.670707 2088165 command_runner.go:130] >         }
	I0916 10:48:02.670710 2088165 command_runner.go:130] >       },
	I0916 10:48:02.670716 2088165 command_runner.go:130] >       "noPivot": false,
	I0916 10:48:02.670720 2088165 command_runner.go:130] >       "disableSnapshotAnnotations": true,
	I0916 10:48:02.670724 2088165 command_runner.go:130] >       "discardUnpackedLayers": true,
	I0916 10:48:02.670729 2088165 command_runner.go:130] >       "ignoreBlockIONotEnabledErrors": false,
	I0916 10:48:02.670733 2088165 command_runner.go:130] >       "ignoreRdtNotEnabledErrors": false
	I0916 10:48:02.670736 2088165 command_runner.go:130] >     },
	I0916 10:48:02.670739 2088165 command_runner.go:130] >     "cni": {
	I0916 10:48:02.670743 2088165 command_runner.go:130] >       "binDir": "/opt/cni/bin",
	I0916 10:48:02.670748 2088165 command_runner.go:130] >       "confDir": "/etc/cni/net.d",
	I0916 10:48:02.670754 2088165 command_runner.go:130] >       "maxConfNum": 1,
	I0916 10:48:02.670759 2088165 command_runner.go:130] >       "setupSerially": false,
	I0916 10:48:02.670767 2088165 command_runner.go:130] >       "confTemplate": "",
	I0916 10:48:02.670771 2088165 command_runner.go:130] >       "ipPref": ""
	I0916 10:48:02.670774 2088165 command_runner.go:130] >     },
	I0916 10:48:02.670778 2088165 command_runner.go:130] >     "registry": {
	I0916 10:48:02.670783 2088165 command_runner.go:130] >       "configPath": "/etc/containerd/certs.d",
	I0916 10:48:02.670795 2088165 command_runner.go:130] >       "mirrors": null,
	I0916 10:48:02.670799 2088165 command_runner.go:130] >       "configs": null,
	I0916 10:48:02.670803 2088165 command_runner.go:130] >       "auths": null,
	I0916 10:48:02.670807 2088165 command_runner.go:130] >       "headers": null
	I0916 10:48:02.670810 2088165 command_runner.go:130] >     },
	I0916 10:48:02.670814 2088165 command_runner.go:130] >     "imageDecryption": {
	I0916 10:48:02.670818 2088165 command_runner.go:130] >       "keyModel": "node"
	I0916 10:48:02.670823 2088165 command_runner.go:130] >     },
	I0916 10:48:02.670829 2088165 command_runner.go:130] >     "disableTCPService": true,
	I0916 10:48:02.670833 2088165 command_runner.go:130] >     "streamServerAddress": "",
	I0916 10:48:02.670839 2088165 command_runner.go:130] >     "streamServerPort": "10010",
	I0916 10:48:02.670844 2088165 command_runner.go:130] >     "streamIdleTimeout": "4h0m0s",
	I0916 10:48:02.670850 2088165 command_runner.go:130] >     "enableSelinux": false,
	I0916 10:48:02.670854 2088165 command_runner.go:130] >     "selinuxCategoryRange": 1024,
	I0916 10:48:02.670862 2088165 command_runner.go:130] >     "sandboxImage": "registry.k8s.io/pause:3.10",
	I0916 10:48:02.670867 2088165 command_runner.go:130] >     "statsCollectPeriod": 10,
	I0916 10:48:02.670874 2088165 command_runner.go:130] >     "systemdCgroup": false,
	I0916 10:48:02.670878 2088165 command_runner.go:130] >     "enableTLSStreaming": false,
	I0916 10:48:02.670882 2088165 command_runner.go:130] >     "x509KeyPairStreaming": {
	I0916 10:48:02.670886 2088165 command_runner.go:130] >       "tlsCertFile": "",
	I0916 10:48:02.670889 2088165 command_runner.go:130] >       "tlsKeyFile": ""
	I0916 10:48:02.670893 2088165 command_runner.go:130] >     },
	I0916 10:48:02.670898 2088165 command_runner.go:130] >     "maxContainerLogSize": 16384,
	I0916 10:48:02.670902 2088165 command_runner.go:130] >     "disableCgroup": false,
	I0916 10:48:02.670906 2088165 command_runner.go:130] >     "disableApparmor": false,
	I0916 10:48:02.670913 2088165 command_runner.go:130] >     "restrictOOMScoreAdj": false,
	I0916 10:48:02.670917 2088165 command_runner.go:130] >     "maxConcurrentDownloads": 3,
	I0916 10:48:02.670922 2088165 command_runner.go:130] >     "disableProcMount": false,
	I0916 10:48:02.670928 2088165 command_runner.go:130] >     "unsetSeccompProfile": "",
	I0916 10:48:02.670933 2088165 command_runner.go:130] >     "tolerateMissingHugetlbController": true,
	I0916 10:48:02.670939 2088165 command_runner.go:130] >     "disableHugetlbController": true,
	I0916 10:48:02.670945 2088165 command_runner.go:130] >     "device_ownership_from_security_context": false,
	I0916 10:48:02.670957 2088165 command_runner.go:130] >     "ignoreImageDefinedVolumes": false,
	I0916 10:48:02.670962 2088165 command_runner.go:130] >     "netnsMountsUnderStateDir": false,
	I0916 10:48:02.670966 2088165 command_runner.go:130] >     "enableUnprivilegedPorts": true,
	I0916 10:48:02.670971 2088165 command_runner.go:130] >     "enableUnprivilegedICMP": false,
	I0916 10:48:02.670980 2088165 command_runner.go:130] >     "enableCDI": false,
	I0916 10:48:02.670984 2088165 command_runner.go:130] >     "cdiSpecDirs": [
	I0916 10:48:02.670988 2088165 command_runner.go:130] >       "/etc/cdi",
	I0916 10:48:02.670991 2088165 command_runner.go:130] >       "/var/run/cdi"
	I0916 10:48:02.670994 2088165 command_runner.go:130] >     ],
	I0916 10:48:02.671000 2088165 command_runner.go:130] >     "imagePullProgressTimeout": "5m0s",
	I0916 10:48:02.671007 2088165 command_runner.go:130] >     "drainExecSyncIOTimeout": "0s",
	I0916 10:48:02.671011 2088165 command_runner.go:130] >     "imagePullWithSyncFs": false,
	I0916 10:48:02.671020 2088165 command_runner.go:130] >     "ignoreDeprecationWarnings": null,
	I0916 10:48:02.671026 2088165 command_runner.go:130] >     "containerdRootDir": "/var/lib/containerd",
	I0916 10:48:02.671034 2088165 command_runner.go:130] >     "containerdEndpoint": "/run/containerd/containerd.sock",
	I0916 10:48:02.671039 2088165 command_runner.go:130] >     "rootDir": "/var/lib/containerd/io.containerd.grpc.v1.cri",
	I0916 10:48:02.671045 2088165 command_runner.go:130] >     "stateDir": "/run/containerd/io.containerd.grpc.v1.cri"
	I0916 10:48:02.671048 2088165 command_runner.go:130] >   },
	I0916 10:48:02.671054 2088165 command_runner.go:130] >   "golang": "go1.22.7",
	I0916 10:48:02.671058 2088165 command_runner.go:130] >   "lastCNILoadStatus": "OK",
	I0916 10:48:02.671065 2088165 command_runner.go:130] >   "lastCNILoadStatus.default": "OK"
	I0916 10:48:02.671068 2088165 command_runner.go:130] > }
	I0916 10:48:02.674112 2088165 cni.go:84] Creating CNI manager for ""
	I0916 10:48:02.674135 2088165 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:48:02.674144 2088165 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:48:02.674166 2088165 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8441 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:functional-911502 NodeName:functional-911502 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodP
ath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:48:02.674302 2088165 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8441
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "functional-911502"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8441
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:48:02.674377 2088165 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:48:02.682588 2088165 command_runner.go:130] > kubeadm
	I0916 10:48:02.682607 2088165 command_runner.go:130] > kubectl
	I0916 10:48:02.682612 2088165 command_runner.go:130] > kubelet
	I0916 10:48:02.683858 2088165 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:48:02.683975 2088165 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:48:02.692859 2088165 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (321 bytes)
	I0916 10:48:02.711631 2088165 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:48:02.731842 2088165 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2171 bytes)
	I0916 10:48:02.750860 2088165 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:48:02.754427 2088165 command_runner.go:130] > 192.168.49.2	control-plane.minikube.internal
	I0916 10:48:02.754604 2088165 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:02.864578 2088165 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:02.876577 2088165 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502 for IP: 192.168.49.2
	I0916 10:48:02.876603 2088165 certs.go:194] generating shared ca certs ...
	I0916 10:48:02.876619 2088165 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:02.876757 2088165 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:48:02.876812 2088165 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:48:02.876822 2088165 certs.go:256] generating profile certs ...
	I0916 10:48:02.876912 2088165 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.key
	I0916 10:48:02.877016 2088165 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.key.03a9d60c
	I0916 10:48:02.877064 2088165 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.key
	I0916 10:48:02.877076 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:48:02.877089 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:48:02.877101 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:48:02.877116 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:48:02.877126 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:48:02.877147 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:48:02.877162 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:48:02.877172 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:48:02.877223 2088165 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:48:02.877260 2088165 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:48:02.877272 2088165 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:48:02.877297 2088165 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:48:02.877326 2088165 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:48:02.877351 2088165 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:48:02.877396 2088165 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:48:02.877426 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:48:02.877443 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:48:02.877455 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:02.878009 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:48:02.905336 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:48:02.933262 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:48:02.958253 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:48:02.983055 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:48:03.010360 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:48:03.040350 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:48:03.067337 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:48:03.102805 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:48:03.135061 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:48:03.160952 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:48:03.185526 2088165 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:48:03.203896 2088165 ssh_runner.go:195] Run: openssl version
	I0916 10:48:03.209344 2088165 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 10:48:03.209746 2088165 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:48:03.219896 2088165 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:03.223424 2088165 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:03.223501 2088165 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:03.223587 2088165 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:03.230151 2088165 command_runner.go:130] > b5213941
	I0916 10:48:03.230559 2088165 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:48:03.239662 2088165 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:48:03.249181 2088165 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:48:03.252963 2088165 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:48:03.252995 2088165 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:48:03.253074 2088165 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:48:03.260600 2088165 command_runner.go:130] > 51391683
	I0916 10:48:03.260681 2088165 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:48:03.269973 2088165 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:48:03.279717 2088165 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:48:03.283489 2088165 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:48:03.283523 2088165 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:48:03.283579 2088165 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:48:03.290333 2088165 command_runner.go:130] > 3ec20f2e
	I0916 10:48:03.290803 2088165 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:48:03.301183 2088165 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:48:03.304780 2088165 command_runner.go:130] >   File: /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:48:03.304810 2088165 command_runner.go:130] >   Size: 1176      	Blocks: 8          IO Block: 4096   regular file
	I0916 10:48:03.304817 2088165 command_runner.go:130] > Device: 10301h/66305d	Inode: 1081533     Links: 1
	I0916 10:48:03.304824 2088165 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 10:48:03.304830 2088165 command_runner.go:130] > Access: 2024-09-16 10:47:21.714942040 +0000
	I0916 10:48:03.304843 2088165 command_runner.go:130] > Modify: 2024-09-16 10:47:21.714942040 +0000
	I0916 10:48:03.304853 2088165 command_runner.go:130] > Change: 2024-09-16 10:47:21.714942040 +0000
	I0916 10:48:03.304858 2088165 command_runner.go:130] >  Birth: 2024-09-16 10:47:21.714942040 +0000
	I0916 10:48:03.304925 2088165 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:48:03.311486 2088165 command_runner.go:130] > Certificate will not expire
	I0916 10:48:03.311890 2088165 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:48:03.318544 2088165 command_runner.go:130] > Certificate will not expire
	I0916 10:48:03.319055 2088165 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:48:03.325922 2088165 command_runner.go:130] > Certificate will not expire
	I0916 10:48:03.326327 2088165 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:48:03.333137 2088165 command_runner.go:130] > Certificate will not expire
	I0916 10:48:03.333544 2088165 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:48:03.339930 2088165 command_runner.go:130] > Certificate will not expire
	I0916 10:48:03.340335 2088165 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:48:03.346964 2088165 command_runner.go:130] > Certificate will not expire
	I0916 10:48:03.347404 2088165 kubeadm.go:392] StartCluster: {Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APISer
verNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirr
or: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:03.347486 2088165 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:48:03.347545 2088165 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:48:03.389466 2088165 command_runner.go:130] > 11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950
	I0916 10:48:03.389521 2088165 command_runner.go:130] > 8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27
	I0916 10:48:03.389673 2088165 command_runner.go:130] > ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b
	I0916 10:48:03.389808 2088165 command_runner.go:130] > 57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6
	I0916 10:48:03.389919 2088165 command_runner.go:130] > a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19
	I0916 10:48:03.390056 2088165 command_runner.go:130] > 492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31
	I0916 10:48:03.390155 2088165 command_runner.go:130] > 31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44
	I0916 10:48:03.390307 2088165 command_runner.go:130] > 928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a
	I0916 10:48:03.393134 2088165 cri.go:89] found id: "11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950"
	I0916 10:48:03.393154 2088165 cri.go:89] found id: "8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27"
	I0916 10:48:03.393158 2088165 cri.go:89] found id: "ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b"
	I0916 10:48:03.393164 2088165 cri.go:89] found id: "57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6"
	I0916 10:48:03.393168 2088165 cri.go:89] found id: "a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	I0916 10:48:03.393171 2088165 cri.go:89] found id: "492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31"
	I0916 10:48:03.393175 2088165 cri.go:89] found id: "31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44"
	I0916 10:48:03.393178 2088165 cri.go:89] found id: "928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a"
	I0916 10:48:03.393182 2088165 cri.go:89] found id: ""
	I0916 10:48:03.393236 2088165 ssh_runner.go:195] Run: sudo runc --root /run/containerd/runc/k8s.io list -f json
	I0916 10:48:03.422511 2088165 command_runner.go:130] > [{"ociVersion":"1.0.2-dev","id":"11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","pid":2200,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950/rootfs","created":"2024-09-16T10:47:56.240222491Z","annotations":{"io.kubernetes.cri.container-name":"coredns","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/coredns/coredns:v1.11.3","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"31265291ac7da
492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44","pid":1422,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44/rootfs","created":"2024-09-16T10:47:32.746424926Z","annotations":{"io.kubernetes.cri.container-name":"kube-scheduler","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-scheduler:v1.31.1","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","pid":1960,"status":"running","bundle":"/run/co
ntainerd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c/rootfs","created":"2024-09-16T10:47:45.184280835Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_storage-provisioner_ecac562d-8318-4226-b5f1-61f2c76bb51b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"492408bc37d38a1d8712ef754136e8b589841b3096dee7a1f
a2e6f6b99ce6c31","pid":1486,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31/rootfs","created":"2024-09-16T10:47:32.919386537Z","annotations":{"io.kubernetes.cri.container-name":"etcd","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/etcd:3.5.15-0","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","pid":1315,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e97
13de051f556893ed4577eb8b5d9d38835da8d64517b7a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a/rootfs","created":"2024-09-16T10:47:32.575723929Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"256","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-apiserver-functional-911502_846e81f7bcac6804cf5ef499ea5ac265","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","pid":1316,"status":"running
","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f/rootfs","created":"2024-09-16T10:47:32.562785558Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"204","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-controller-manager-functional-911502_26c4a2e985a1c721e0411e5d9497a35b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev
","id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","pid":1338,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7/rootfs","created":"2024-09-16T10:47:32.61203341Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-scheduler-functional-911502_69dae9ff35c780f43a15f539d6f19e46","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbo
x-uid":"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","pid":1792,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6/rootfs","created":"2024-09-16T10:47:44.701696235Z","annotations":{"io.kubernetes.cri.container-name":"kube-proxy","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-proxy:v1.31.1","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"8aede76947864ca07593bc24b939a
29faf7bb7dd85244f30f18f232f3ec1ea27","pid":2092,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27/rootfs","created":"2024-09-16T10:47:45.396938875Z","annotations":{"io.kubernetes.cri.container-name":"storage-provisioner","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"gcr.io/k8s-minikube/storage-provisioner:v5","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","pid":1399,"status":"running","bundle":"/run/containerd/io.cont
ainerd.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a/rootfs","created":"2024-09-16T10:47:32.722647206Z","annotations":{"io.kubernetes.cri.container-name":"kube-controller-manager","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-controller-manager:v1.31.1","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","pid":2166,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b
890b3ee6fc0070983ddfd","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd/rootfs","created":"2024-09-16T10:47:56.156755788Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_coredns-7c65d6cfc9-6kw9d_072167c7-fa1a-463e-a957-91ea24020387","io.kubernetes.cri.sandbox-memory":"178257920","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","pid":1479,"status":"running","bundle":"/run/contain
erd/io.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19/rootfs","created":"2024-09-16T10:47:32.854949973Z","annotations":{"io.kubernetes.cri.container-name":"kube-apiserver","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-apiserver:v1.31.1","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","pid":1768,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891
ab78e","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e/rootfs","created":"2024-09-16T10:47:44.62156803Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"10000","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kindnet-7r2rg_ab52a601-e0fe-4f60-a202-477487da9bb2","io.kubernetes.cri.sandbox-memory":"52428800","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","pid":1733,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8
s.io/c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1/rootfs","created":"2024-09-16T10:47:44.525242585Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-proxy-l59dx_72a26843-9f97-4121-91f0-3cb389048315","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","pid":1835,"status":"running",
"bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b/rootfs","created":"2024-09-16T10:47:44.827883907Z","annotations":{"io.kubernetes.cri.container-name":"kindnet-cni","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"docker.io/kindest/kindnetd:v20240813-c6f155d6","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","pid":1295,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d4
8ffedbe08802f86e","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e/rootfs","created":"2024-09-16T10:47:32.534113117Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_etcd-functional-911502_c7ab8017ca620f2ba7e026f4cdb427a2","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"}]
	I0916 10:48:03.424698 2088165 cri.go:116] JSON = [{"ociVersion":"1.0.2-dev","id":"11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","pid":2200,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950/rootfs","created":"2024-09-16T10:47:56.240222491Z","annotations":{"io.kubernetes.cri.container-name":"coredns","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/coredns/coredns:v1.11.3","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"31265291ac7da492c3c
fad84540ba2b684cdf0abad82be5c56d392df7613dc44","pid":1422,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44/rootfs","created":"2024-09-16T10:47:32.746424926Z","annotations":{"io.kubernetes.cri.container-name":"kube-scheduler","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-scheduler:v1.31.1","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","pid":1960,"status":"running","bundle":"/run/containe
rd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c/rootfs","created":"2024-09-16T10:47:45.184280835Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_storage-provisioner_ecac562d-8318-4226-b5f1-61f2c76bb51b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6
b99ce6c31","pid":1486,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31/rootfs","created":"2024-09-16T10:47:32.919386537Z","annotations":{"io.kubernetes.cri.container-name":"etcd","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/etcd:3.5.15-0","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","pid":1315,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e9713de05
1f556893ed4577eb8b5d9d38835da8d64517b7a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a/rootfs","created":"2024-09-16T10:47:32.575723929Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"256","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-apiserver-functional-911502_846e81f7bcac6804cf5ef499ea5ac265","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","pid":1316,"status":"running","bun
dle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f/rootfs","created":"2024-09-16T10:47:32.562785558Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"204","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-controller-manager-functional-911502_26c4a2e985a1c721e0411e5d9497a35b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id"
:"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","pid":1338,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7/rootfs","created":"2024-09-16T10:47:32.61203341Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-scheduler-functional-911502_69dae9ff35c780f43a15f539d6f19e46","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid"
:"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","pid":1792,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6/rootfs","created":"2024-09-16T10:47:44.701696235Z","annotations":{"io.kubernetes.cri.container-name":"kube-proxy","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-proxy:v1.31.1","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"8aede76947864ca07593bc24b939a29faf7
bb7dd85244f30f18f232f3ec1ea27","pid":2092,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27/rootfs","created":"2024-09-16T10:47:45.396938875Z","annotations":{"io.kubernetes.cri.container-name":"storage-provisioner","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"gcr.io/k8s-minikube/storage-provisioner:v5","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","pid":1399,"status":"running","bundle":"/run/containerd/io.containerd
.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a/rootfs","created":"2024-09-16T10:47:32.722647206Z","annotations":{"io.kubernetes.cri.container-name":"kube-controller-manager","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-controller-manager:v1.31.1","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","pid":2166,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3e
e6fc0070983ddfd","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd/rootfs","created":"2024-09-16T10:47:56.156755788Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_coredns-7c65d6cfc9-6kw9d_072167c7-fa1a-463e-a957-91ea24020387","io.kubernetes.cri.sandbox-memory":"178257920","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","pid":1479,"status":"running","bundle":"/run/containerd/io
.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19/rootfs","created":"2024-09-16T10:47:32.854949973Z","annotations":{"io.kubernetes.cri.container-name":"kube-apiserver","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-apiserver:v1.31.1","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","pid":1768,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e"
,"rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e/rootfs","created":"2024-09-16T10:47:44.62156803Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"10000","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kindnet-7r2rg_ab52a601-e0fe-4f60-a202-477487da9bb2","io.kubernetes.cri.sandbox-memory":"52428800","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","pid":1733,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/c
900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1/rootfs","created":"2024-09-16T10:47:44.525242585Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-proxy-l59dx_72a26843-9f97-4121-91f0-3cb389048315","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","pid":1835,"status":"running","bundl
e":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b/rootfs","created":"2024-09-16T10:47:44.827883907Z","annotations":{"io.kubernetes.cri.container-name":"kindnet-cni","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"docker.io/kindest/kindnetd:v20240813-c6f155d6","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","pid":1295,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedb
e08802f86e","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e/rootfs","created":"2024-09-16T10:47:32.534113117Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_etcd-functional-911502_c7ab8017ca620f2ba7e026f4cdb427a2","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"}]
	I0916 10:48:03.424996 2088165 cri.go:126] list returned 16 containers
	I0916 10:48:03.425013 2088165 cri.go:129] container: {ID:11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 Status:running}
	I0916 10:48:03.425028 2088165 cri.go:135] skipping {11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 running}: state = "running", want "paused"
	I0916 10:48:03.425036 2088165 cri.go:129] container: {ID:31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 Status:running}
	I0916 10:48:03.425042 2088165 cri.go:135] skipping {31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 running}: state = "running", want "paused"
	I0916 10:48:03.425051 2088165 cri.go:129] container: {ID:334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c Status:running}
	I0916 10:48:03.425058 2088165 cri.go:131] skipping 334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c - not in ps
	I0916 10:48:03.425065 2088165 cri.go:129] container: {ID:492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 Status:running}
	I0916 10:48:03.425072 2088165 cri.go:135] skipping {492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 running}: state = "running", want "paused"
	I0916 10:48:03.425077 2088165 cri.go:129] container: {ID:51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a Status:running}
	I0916 10:48:03.425082 2088165 cri.go:131] skipping 51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a - not in ps
	I0916 10:48:03.425092 2088165 cri.go:129] container: {ID:54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f Status:running}
	I0916 10:48:03.425097 2088165 cri.go:131] skipping 54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f - not in ps
	I0916 10:48:03.425102 2088165 cri.go:129] container: {ID:578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7 Status:running}
	I0916 10:48:03.425109 2088165 cri.go:131] skipping 578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7 - not in ps
	I0916 10:48:03.425112 2088165 cri.go:129] container: {ID:57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 Status:running}
	I0916 10:48:03.425118 2088165 cri.go:135] skipping {57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 running}: state = "running", want "paused"
	I0916 10:48:03.425125 2088165 cri.go:129] container: {ID:8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 Status:running}
	I0916 10:48:03.425134 2088165 cri.go:135] skipping {8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 running}: state = "running", want "paused"
	I0916 10:48:03.425139 2088165 cri.go:129] container: {ID:928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a Status:running}
	I0916 10:48:03.425146 2088165 cri.go:135] skipping {928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a running}: state = "running", want "paused"
	I0916 10:48:03.425152 2088165 cri.go:129] container: {ID:95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd Status:running}
	I0916 10:48:03.425159 2088165 cri.go:131] skipping 95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd - not in ps
	I0916 10:48:03.425163 2088165 cri.go:129] container: {ID:a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 Status:running}
	I0916 10:48:03.425169 2088165 cri.go:135] skipping {a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 running}: state = "running", want "paused"
	I0916 10:48:03.425176 2088165 cri.go:129] container: {ID:b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e Status:running}
	I0916 10:48:03.425184 2088165 cri.go:131] skipping b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e - not in ps
	I0916 10:48:03.425189 2088165 cri.go:129] container: {ID:c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1 Status:running}
	I0916 10:48:03.425193 2088165 cri.go:131] skipping c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1 - not in ps
	I0916 10:48:03.425197 2088165 cri.go:129] container: {ID:ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b Status:running}
	I0916 10:48:03.425204 2088165 cri.go:135] skipping {ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b running}: state = "running", want "paused"
	I0916 10:48:03.425209 2088165 cri.go:129] container: {ID:e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e Status:running}
	I0916 10:48:03.425215 2088165 cri.go:131] skipping e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e - not in ps
	I0916 10:48:03.425270 2088165 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:48:03.434943 2088165 command_runner.go:130] > /var/lib/kubelet/config.yaml
	I0916 10:48:03.434970 2088165 command_runner.go:130] > /var/lib/kubelet/kubeadm-flags.env
	I0916 10:48:03.434977 2088165 command_runner.go:130] > /var/lib/minikube/etcd:
	I0916 10:48:03.434980 2088165 command_runner.go:130] > member
	I0916 10:48:03.434999 2088165 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 10:48:03.435005 2088165 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 10:48:03.435058 2088165 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 10:48:03.444332 2088165 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:48:03.444876 2088165 kubeconfig.go:125] found "functional-911502" server: "https://192.168.49.2:8441"
	I0916 10:48:03.445356 2088165 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:48:03.445657 2088165 kapi.go:59] client config for functional-911502: &rest.Config{Host:"https://192.168.49.2:8441", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(ni
l), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:48:03.446313 2088165 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 10:48:03.446395 2088165 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 10:48:03.457796 2088165 kubeadm.go:630] The running cluster does not require reconfiguration: 192.168.49.2
	I0916 10:48:03.457836 2088165 kubeadm.go:597] duration metric: took 22.82566ms to restartPrimaryControlPlane
	I0916 10:48:03.457847 2088165 kubeadm.go:394] duration metric: took 110.449114ms to StartCluster
	I0916 10:48:03.457881 2088165 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:03.457963 2088165 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:48:03.458667 2088165 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:03.458968 2088165 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:48:03.459354 2088165 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:48:03.459393 2088165 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:48:03.459499 2088165 addons.go:69] Setting storage-provisioner=true in profile "functional-911502"
	I0916 10:48:03.459522 2088165 addons.go:234] Setting addon storage-provisioner=true in "functional-911502"
	W0916 10:48:03.459531 2088165 addons.go:243] addon storage-provisioner should already be in state true
	I0916 10:48:03.459557 2088165 addons.go:69] Setting default-storageclass=true in profile "functional-911502"
	I0916 10:48:03.459594 2088165 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "functional-911502"
	I0916 10:48:03.459610 2088165 host.go:66] Checking if "functional-911502" exists ...
	I0916 10:48:03.459971 2088165 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:03.460078 2088165 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:03.463751 2088165 out.go:177] * Verifying Kubernetes components...
	I0916 10:48:03.465786 2088165 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:03.492327 2088165 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:48:03.492591 2088165 kapi.go:59] client config for functional-911502: &rest.Config{Host:"https://192.168.49.2:8441", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(ni
l), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:48:03.492857 2088165 addons.go:234] Setting addon default-storageclass=true in "functional-911502"
	W0916 10:48:03.492869 2088165 addons.go:243] addon default-storageclass should already be in state true
	I0916 10:48:03.492895 2088165 host.go:66] Checking if "functional-911502" exists ...
	I0916 10:48:03.493327 2088165 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:03.495766 2088165 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:48:03.497894 2088165 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:03.497918 2088165 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:48:03.497988 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:03.524383 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:03.535658 2088165 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:03.535679 2088165 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:48:03.535743 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:03.559290 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:03.630778 2088165 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:03.645288 2088165 node_ready.go:35] waiting up to 6m0s for node "functional-911502" to be "Ready" ...
	I0916 10:48:03.645411 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:03.645423 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.645432 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.645436 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.660535 2088165 round_trippers.go:574] Response Status: 200 OK in 15 milliseconds
	I0916 10:48:03.660564 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.660573 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.660577 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.660580 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.660583 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.660588 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.660590 2088165 round_trippers.go:580]     Audit-Id: 7d3a4638-e932-4d74-b644-bf06bf86d216
	I0916 10:48:03.660740 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:03.661523 2088165 node_ready.go:49] node "functional-911502" has status "Ready":"True"
	I0916 10:48:03.661547 2088165 node_ready.go:38] duration metric: took 16.226104ms for node "functional-911502" to be "Ready" ...
	I0916 10:48:03.661558 2088165 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:03.661622 2088165 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 10:48:03.661642 2088165 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 10:48:03.661697 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:03.661714 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.661729 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.661738 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.665791 2088165 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:48:03.665823 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.665831 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.665835 2088165 round_trippers.go:580]     Audit-Id: bd947b24-7cff-4e2d-bf6a-a19671aac2cf
	I0916 10:48:03.665838 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.665841 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.665844 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.665846 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.666612 2088165 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"424"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-6kw9d","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"072167c7-fa1a-463e-a957-91ea24020387","resourceVersion":"413","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a7b12c6a-a15b-40a7-bca2-b089a82851d2","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a7b12c6a-a15b-40a7-bca2-b089a82851d2\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 58828 chars]
	I0916 10:48:03.671275 2088165 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.671383 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-6kw9d
	I0916 10:48:03.671392 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.671401 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.671405 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.673677 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.673696 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.673704 2088165 round_trippers.go:580]     Audit-Id: 7a50e2b3-41d6-4368-b716-123bfea51442
	I0916 10:48:03.673717 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.673720 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.673723 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.673726 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.673730 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.674041 2088165 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-6kw9d","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"072167c7-fa1a-463e-a957-91ea24020387","resourceVersion":"413","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a7b12c6a-a15b-40a7-bca2-b089a82851d2","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a7b12c6a-a15b-40a7-bca2-b089a82851d2\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6481 chars]
	I0916 10:48:03.674594 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:03.674614 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.674624 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.674629 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.676727 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.676748 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.676757 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.676762 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.676765 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.676768 2088165 round_trippers.go:580]     Audit-Id: 939eac82-f066-4f20-a04c-bd069cb1f232
	I0916 10:48:03.676771 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.676773 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.677209 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:03.677592 2088165 pod_ready.go:93] pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:03.677612 2088165 pod_ready.go:82] duration metric: took 6.30474ms for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.677622 2088165 pod_ready.go:79] waiting up to 6m0s for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.677684 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/etcd-functional-911502
	I0916 10:48:03.677695 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.677703 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.677708 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.679894 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.679927 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.679935 2088165 round_trippers.go:580]     Audit-Id: 52c0bb97-d0e8-458d-95af-3a903313478c
	I0916 10:48:03.679940 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.679944 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.679947 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.679950 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.679953 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.680607 2088165 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-functional-911502","namespace":"kube-system","uid":"9cccef26-ac83-485f-a6ae-2017f0ff645b","resourceVersion":"402","creationTimestamp":"2024-09-16T10:47:37Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.49.2:2379","kubernetes.io/config.hash":"c7ab8017ca620f2ba7e026f4cdb427a2","kubernetes.io/config.mirror":"c7ab8017ca620f2ba7e026f4cdb427a2","kubernetes.io/config.seen":"2024-09-16T10:47:31.996556421Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-
client-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/confi [truncated 6445 chars]
	I0916 10:48:03.681113 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:03.681130 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.681139 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.681143 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.683628 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.683649 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.683657 2088165 round_trippers.go:580]     Audit-Id: 04d2b56a-1fb5-44bb-a456-db9c38ec437a
	I0916 10:48:03.683661 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.683664 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.683667 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.683670 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.683673 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.684541 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:03.684931 2088165 pod_ready.go:93] pod "etcd-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:03.684956 2088165 pod_ready.go:82] duration metric: took 7.326452ms for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.684970 2088165 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.685043 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-911502
	I0916 10:48:03.685054 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.685062 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.685068 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.687419 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.687440 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.687448 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.687466 2088165 round_trippers.go:580]     Audit-Id: bcc7e83b-5656-4754-9aba-3b769e3df8ca
	I0916 10:48:03.687473 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.687476 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.687482 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.687485 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.688021 2088165 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-functional-911502","namespace":"kube-system","uid":"d399bd77-51dd-4ad3-90d4-6cf11e9e156e","resourceVersion":"301","creationTimestamp":"2024-09-16T10:47:39Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.49.2:8441","kubernetes.io/config.hash":"846e81f7bcac6804cf5ef499ea5ac265","kubernetes.io/config.mirror":"846e81f7bcac6804cf5ef499ea5ac265","kubernetes.io/config.seen":"2024-09-16T10:47:39.051012852Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:39Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.ku
bernetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes [truncated 8521 chars]
	I0916 10:48:03.688559 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:03.688576 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.688584 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.688589 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.690702 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.690720 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.690728 2088165 round_trippers.go:580]     Audit-Id: 3b495a44-0202-4d14-b96c-97f07d2bc499
	I0916 10:48:03.690733 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.690753 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.690763 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.690766 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.690769 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.691282 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:03.691685 2088165 pod_ready.go:93] pod "kube-apiserver-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:03.691714 2088165 pod_ready.go:82] duration metric: took 6.734169ms for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.691726 2088165 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.691797 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-controller-manager-functional-911502
	I0916 10:48:03.691807 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.691815 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.691820 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.694012 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.694032 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.694041 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.694045 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.694048 2088165 round_trippers.go:580]     Audit-Id: 60a5cbd4-cb73-46d2-a052-ab9b16a0df91
	I0916 10:48:03.694063 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.694072 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.694075 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.694623 2088165 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-functional-911502","namespace":"kube-system","uid":"60f8d5ef-11df-400e-bce8-00ed7502b8c7","resourceVersion":"397","creationTimestamp":"2024-09-16T10:47:39Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"26c4a2e985a1c721e0411e5d9497a35b","kubernetes.io/config.mirror":"26c4a2e985a1c721e0411e5d9497a35b","kubernetes.io/config.seen":"2024-09-16T10:47:39.051014378Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:39Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes
.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{"." [truncated 8096 chars]
	I0916 10:48:03.695213 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:03.695233 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.695242 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.695246 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.697778 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.697799 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.697808 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.697813 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.697816 2088165 round_trippers.go:580]     Audit-Id: d25b7667-3d27-4852-a1db-acd4e3def388
	I0916 10:48:03.697819 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.697821 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.697824 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.698450 2088165 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:03.699849 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:03.700223 2088165 pod_ready.go:93] pod "kube-controller-manager-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:03.700239 2088165 pod_ready.go:82] duration metric: took 8.504824ms for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.700250 2088165 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.717382 2088165 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:03.845928 2088165 request.go:632] Waited for 145.611246ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-proxy-l59dx
	I0916 10:48:03.846018 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-proxy-l59dx
	I0916 10:48:03.846043 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.846052 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.846063 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.848622 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.848646 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.848655 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.848659 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.848663 2088165 round_trippers.go:580]     Audit-Id: 56857d5c-2cf6-48d4-a5e5-52df385277e0
	I0916 10:48:03.848674 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.848677 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.848681 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.849192 2088165 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-l59dx","generateName":"kube-proxy-","namespace":"kube-system","uid":"72a26843-9f97-4121-91f0-3cb389048315","resourceVersion":"381","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"35f9ba26-390b-4032-b0bf-72d6e30119ee","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"35f9ba26-390b-4032-b0bf-72d6e30119ee\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6177 chars]
	I0916 10:48:04.046546 2088165 request.go:632] Waited for 196.765909ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:04.046620 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:04.046630 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.046639 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.046653 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.049353 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.049430 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.049453 2088165 round_trippers.go:580]     Audit-Id: 84721d2e-7d21-4d43-a34b-77d002ffe4f4
	I0916 10:48:04.049472 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.049569 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.049597 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.049614 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.049630 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.049811 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:04.050261 2088165 pod_ready.go:93] pod "kube-proxy-l59dx" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:04.050286 2088165 pod_ready.go:82] duration metric: took 350.026689ms for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:04.050299 2088165 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:04.127576 2088165 command_runner.go:130] > serviceaccount/storage-provisioner unchanged
	I0916 10:48:04.156134 2088165 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner unchanged
	I0916 10:48:04.180061 2088165 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner unchanged
	I0916 10:48:04.203294 2088165 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner unchanged
	I0916 10:48:04.245493 2088165 request.go:632] Waited for 195.09035ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-911502
	I0916 10:48:04.245592 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-911502
	I0916 10:48:04.245606 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.245616 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.245639 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.248067 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.248137 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.248159 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.248179 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.248193 2088165 round_trippers.go:580]     Audit-Id: 53f981f0-e971-4733-bf38-42a727012b1f
	I0916 10:48:04.248219 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.248235 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.248249 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.249038 2088165 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-911502","namespace":"kube-system","uid":"7da8ecbb-189d-4ed2-bcbe-69ef483b67e8","resourceVersion":"359","creationTimestamp":"2024-09-16T10:47:39Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"69dae9ff35c780f43a15f539d6f19e46","kubernetes.io/config.mirror":"69dae9ff35c780f43a15f539d6f19e46","kubernetes.io/config.seen":"2024-09-16T10:47:39.051015494Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:39Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 4978 chars]
	I0916 10:48:04.313333 2088165 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath unchanged
	I0916 10:48:04.432371 2088165 command_runner.go:130] > pod/storage-provisioner configured
	I0916 10:48:04.437599 2088165 command_runner.go:130] > storageclass.storage.k8s.io/standard unchanged
	I0916 10:48:04.437721 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/apis/storage.k8s.io/v1/storageclasses
	I0916 10:48:04.437734 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.437744 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.437749 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.440211 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.440235 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.440244 2088165 round_trippers.go:580]     Content-Length: 1273
	I0916 10:48:04.440250 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.440253 2088165 round_trippers.go:580]     Audit-Id: 6c394b0f-6b8a-4424-8a7f-4ad929b8b1e5
	I0916 10:48:04.440256 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.440259 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.440274 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.440281 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.440348 2088165 request.go:1351] Response Body: {"kind":"StorageClassList","apiVersion":"storage.k8s.io/v1","metadata":{"resourceVersion":"424"},"items":[{"metadata":{"name":"standard","uid":"09dcbef5-fa0f-4e66-b848-ffef0edc1433","resourceVersion":"340","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kuberne
tes.io/last-applied-configuration":{},"f:storageclass.kubernetes.io/is- [truncated 249 chars]
	I0916 10:48:04.440831 2088165 request.go:1351] Request Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"09dcbef5-fa0f-4e66-b848-ffef0edc1433","resourceVersion":"340","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storageclas
s.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 10:48:04.440888 2088165 round_trippers.go:463] PUT https://192.168.49.2:8441/apis/storage.k8s.io/v1/storageclasses/standard
	I0916 10:48:04.440899 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.440907 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.440913 2088165 round_trippers.go:473]     Content-Type: application/json
	I0916 10:48:04.440924 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.443808 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.443831 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.443840 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.443844 2088165 round_trippers.go:580]     Audit-Id: b8d9cfe0-12be-4fe1-9c8a-fc402f8daf5b
	I0916 10:48:04.443848 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.443868 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.443877 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.443880 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.443883 2088165 round_trippers.go:580]     Content-Length: 1220
	I0916 10:48:04.443917 2088165 request.go:1351] Response Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"09dcbef5-fa0f-4e66-b848-ffef0edc1433","resourceVersion":"340","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storagecla
ss.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 10:48:04.445980 2088165 request.go:632] Waited for 196.425792ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:04.446033 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:04.446044 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.446053 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.446057 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.446920 2088165 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 10:48:04.448570 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.448589 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.448597 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.448602 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.448605 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.448608 2088165 round_trippers.go:580]     Audit-Id: a457554d-37d8-4e36-99a3-a36abe6cba96
	I0916 10:48:04.448611 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.448613 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.448739 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:04.448976 2088165 addons.go:510] duration metric: took 989.576689ms for enable addons: enabled=[storage-provisioner default-storageclass]
	I0916 10:48:04.449161 2088165 pod_ready.go:93] pod "kube-scheduler-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:04.449175 2088165 pod_ready.go:82] duration metric: took 398.850709ms for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:04.449185 2088165 pod_ready.go:39] duration metric: took 787.616557ms for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:04.449204 2088165 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:48:04.449265 2088165 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:48:04.459023 2088165 command_runner.go:130] > 1479
	I0916 10:48:04.460316 2088165 api_server.go:72] duration metric: took 1.001309547s to wait for apiserver process to appear ...
	I0916 10:48:04.460338 2088165 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:48:04.460359 2088165 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:04.468569 2088165 api_server.go:279] https://192.168.49.2:8441/healthz returned 200:
	ok
	I0916 10:48:04.468653 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/version
	I0916 10:48:04.468664 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.468675 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.468678 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.470099 2088165 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:48:04.470133 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.470140 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.470146 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.470149 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.470153 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.470155 2088165 round_trippers.go:580]     Content-Length: 263
	I0916 10:48:04.470158 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.470161 2088165 round_trippers.go:580]     Audit-Id: 3d925272-5044-4fbe-9e45-9b830c352d6d
	I0916 10:48:04.470177 2088165 request.go:1351] Response Body: {
	  "major": "1",
	  "minor": "31",
	  "gitVersion": "v1.31.1",
	  "gitCommit": "948afe5ca072329a73c8e79ed5938717a5cb3d21",
	  "gitTreeState": "clean",
	  "buildDate": "2024-09-11T21:22:08Z",
	  "goVersion": "go1.22.6",
	  "compiler": "gc",
	  "platform": "linux/arm64"
	}
	I0916 10:48:04.470300 2088165 api_server.go:141] control plane version: v1.31.1
	I0916 10:48:04.470320 2088165 api_server.go:131] duration metric: took 9.975609ms to wait for apiserver health ...
	I0916 10:48:04.470332 2088165 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:48:04.645543 2088165 request.go:632] Waited for 175.134103ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:04.645629 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:04.645642 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.645651 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.645657 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.648503 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.648539 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.648549 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.648555 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.648559 2088165 round_trippers.go:580]     Audit-Id: 276c41ab-f18c-4cd0-b778-e7e69343c450
	I0916 10:48:04.648561 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.648564 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.648567 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.649675 2088165 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"424"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-6kw9d","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"072167c7-fa1a-463e-a957-91ea24020387","resourceVersion":"413","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a7b12c6a-a15b-40a7-bca2-b089a82851d2","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a7b12c6a-a15b-40a7-bca2-b089a82851d2\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 58828 chars]
	I0916 10:48:04.652245 2088165 system_pods.go:59] 8 kube-system pods found
	I0916 10:48:04.652287 2088165 system_pods.go:61] "coredns-7c65d6cfc9-6kw9d" [072167c7-fa1a-463e-a957-91ea24020387] Running
	I0916 10:48:04.652297 2088165 system_pods.go:61] "etcd-functional-911502" [9cccef26-ac83-485f-a6ae-2017f0ff645b] Running
	I0916 10:48:04.652302 2088165 system_pods.go:61] "kindnet-7r2rg" [ab52a601-e0fe-4f60-a202-477487da9bb2] Running
	I0916 10:48:04.652315 2088165 system_pods.go:61] "kube-apiserver-functional-911502" [d399bd77-51dd-4ad3-90d4-6cf11e9e156e] Running
	I0916 10:48:04.652320 2088165 system_pods.go:61] "kube-controller-manager-functional-911502" [60f8d5ef-11df-400e-bce8-00ed7502b8c7] Running
	I0916 10:48:04.652324 2088165 system_pods.go:61] "kube-proxy-l59dx" [72a26843-9f97-4121-91f0-3cb389048315] Running
	I0916 10:48:04.652329 2088165 system_pods.go:61] "kube-scheduler-functional-911502" [7da8ecbb-189d-4ed2-bcbe-69ef483b67e8] Running
	I0916 10:48:04.652337 2088165 system_pods.go:61] "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running
	I0916 10:48:04.652343 2088165 system_pods.go:74] duration metric: took 182.003926ms to wait for pod list to return data ...
	I0916 10:48:04.652354 2088165 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:48:04.845838 2088165 request.go:632] Waited for 193.389585ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/namespaces/default/serviceaccounts
	I0916 10:48:04.845922 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/default/serviceaccounts
	I0916 10:48:04.845934 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.845943 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.845950 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.848728 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.848753 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.848763 2088165 round_trippers.go:580]     Audit-Id: b1d51c6d-03dd-45c8-99f8-5de75e4ae5eb
	I0916 10:48:04.848768 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.848772 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.848776 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.848779 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.848781 2088165 round_trippers.go:580]     Content-Length: 261
	I0916 10:48:04.848784 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.848844 2088165 request.go:1351] Response Body: {"kind":"ServiceAccountList","apiVersion":"v1","metadata":{"resourceVersion":"424"},"items":[{"metadata":{"name":"default","namespace":"default","uid":"edf53d44-fa4f-4597-be01-9bceb291c079","resourceVersion":"304","creationTimestamp":"2024-09-16T10:47:43Z"}}]}
	I0916 10:48:04.849067 2088165 default_sa.go:45] found service account: "default"
	I0916 10:48:04.849091 2088165 default_sa.go:55] duration metric: took 196.730906ms for default service account to be created ...
	I0916 10:48:04.849104 2088165 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:48:05.045482 2088165 request.go:632] Waited for 196.308672ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:05.045591 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:05.045604 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:05.045613 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.045632 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.048822 2088165 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:48:05.048888 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:05.048910 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.048928 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:05.048943 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:05.048971 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.048993 2088165 round_trippers.go:580]     Audit-Id: 8a307c14-81a7-4b63-89d0-6e81f4952347
	I0916 10:48:05.049008 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.049540 2088165 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"424"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-6kw9d","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"072167c7-fa1a-463e-a957-91ea24020387","resourceVersion":"413","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a7b12c6a-a15b-40a7-bca2-b089a82851d2","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a7b12c6a-a15b-40a7-bca2-b089a82851d2\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 58828 chars]
	I0916 10:48:05.052262 2088165 system_pods.go:86] 8 kube-system pods found
	I0916 10:48:05.052310 2088165 system_pods.go:89] "coredns-7c65d6cfc9-6kw9d" [072167c7-fa1a-463e-a957-91ea24020387] Running
	I0916 10:48:05.052318 2088165 system_pods.go:89] "etcd-functional-911502" [9cccef26-ac83-485f-a6ae-2017f0ff645b] Running
	I0916 10:48:05.052323 2088165 system_pods.go:89] "kindnet-7r2rg" [ab52a601-e0fe-4f60-a202-477487da9bb2] Running
	I0916 10:48:05.052329 2088165 system_pods.go:89] "kube-apiserver-functional-911502" [d399bd77-51dd-4ad3-90d4-6cf11e9e156e] Running
	I0916 10:48:05.052335 2088165 system_pods.go:89] "kube-controller-manager-functional-911502" [60f8d5ef-11df-400e-bce8-00ed7502b8c7] Running
	I0916 10:48:05.052340 2088165 system_pods.go:89] "kube-proxy-l59dx" [72a26843-9f97-4121-91f0-3cb389048315] Running
	I0916 10:48:05.052344 2088165 system_pods.go:89] "kube-scheduler-functional-911502" [7da8ecbb-189d-4ed2-bcbe-69ef483b67e8] Running
	I0916 10:48:05.052350 2088165 system_pods.go:89] "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running
	I0916 10:48:05.052359 2088165 system_pods.go:126] duration metric: took 203.244785ms to wait for k8s-apps to be running ...
	I0916 10:48:05.052370 2088165 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:48:05.052438 2088165 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:48:05.064597 2088165 system_svc.go:56] duration metric: took 12.217908ms WaitForService to wait for kubelet
	I0916 10:48:05.064684 2088165 kubeadm.go:582] duration metric: took 1.605680381s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:48:05.064710 2088165 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:48:05.246151 2088165 request.go:632] Waited for 181.330182ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/nodes
	I0916 10:48:05.246216 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes
	I0916 10:48:05.246223 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:05.246232 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.246242 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.248710 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:05.248735 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:05.248744 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.248748 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:05.248751 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:05.248754 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.248757 2088165 round_trippers.go:580]     Audit-Id: fdca99b8-ca8f-4880-bfe3-aa5f1a024c7b
	I0916 10:48:05.248759 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.248955 2088165 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"424"},"items":[{"metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"ma
nagedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v [truncated 5160 chars]
	I0916 10:48:05.249387 2088165 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:48:05.249421 2088165 node_conditions.go:123] node cpu capacity is 2
	I0916 10:48:05.249432 2088165 node_conditions.go:105] duration metric: took 184.716467ms to run NodePressure ...
	I0916 10:48:05.249448 2088165 start.go:241] waiting for startup goroutines ...
	I0916 10:48:05.249463 2088165 start.go:246] waiting for cluster config update ...
	I0916 10:48:05.249474 2088165 start.go:255] writing updated cluster config ...
	I0916 10:48:05.249777 2088165 ssh_runner.go:195] Run: rm -f paused
	I0916 10:48:05.256548 2088165 out.go:177] * Done! kubectl is now configured to use "functional-911502" cluster and "default" namespace by default
	E0916 10:48:05.259198 2088165 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	11757969f67eb       2f6c962e7b831       10 seconds ago      Running             coredns                   0                   95b6f0353b091       coredns-7c65d6cfc9-6kw9d
	8aede76947864       ba04bb24b9575       21 seconds ago      Running             storage-provisioner       0                   334ec243859df       storage-provisioner
	ce5a28d1cb5d2       6a23fa8fd2b78       21 seconds ago      Running             kindnet-cni               0                   b400f9b4bc923       kindnet-7r2rg
	57c3cd94d0c59       24a140c548c07       21 seconds ago      Running             kube-proxy                0                   c900cfd22280f       kube-proxy-l59dx
	a27dc93745c62       d3f53a98c0a9d       33 seconds ago      Running             kube-apiserver            0                   51fb442d9d3e0       kube-apiserver-functional-911502
	492408bc37d38       27e3830e14027       33 seconds ago      Running             etcd                      0                   e43a7a67672f1       etcd-functional-911502
	31265291ac7da       7f8aa378bb47d       33 seconds ago      Running             kube-scheduler            0                   578f22ca4016c       kube-scheduler-functional-911502
	928f2c64d0a66       279f381cb3736       33 seconds ago      Running             kube-controller-manager   0                   54de1abbce22f       kube-controller-manager-functional-911502
	
	
	==> containerd <==
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.318999212Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319012922Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319080089Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319102842Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319116610Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319130665Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319142275Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319161590Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319174160Z" level=info msg="NRI interface is disabled by configuration."
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319185828Z" level=info msg="loading plugin \"io.containerd.grpc.v1.cri\"..." type=io.containerd.grpc.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319497194Z" level=info msg="Start cri plugin with config {PluginConfig:{ContainerdConfig:{Snapshotter:overlayfs DefaultRuntimeName:runc DefaultRuntime:{Type: Path: Engine: PodAnnotations:[] ContainerAnnotations:[] Root: Options:map[] PrivilegedWithoutHostDevices:false PrivilegedWithoutHostDevicesAllDevicesAllowed:false BaseRuntimeSpec: NetworkPluginConfDir: NetworkPluginMaxConfNum:0 Snapshotter: SandboxMode:} UntrustedWorkloadRuntime:{Type: Path: Engine: PodAnnotations:[] ContainerAnnotations:[] Root: Options:map[] PrivilegedWithoutHostDevices:false PrivilegedWithoutHostDevicesAllDevicesAllowed:false BaseRuntimeSpec: NetworkPluginConfDir: NetworkPluginMaxConfNum:0 Snapshotter: SandboxMode:} Runtimes:map[runc:{Type:io.containerd.runc.v2 Path: Engine: PodAnnotations:[] ContainerAnnotations:[] Root: Options:map[SystemdCgroup:false] PrivilegedWithoutHostDevices:false PrivilegedWithoutHostDevicesAllDevicesAllowed:false BaseRunti
meSpec: NetworkPluginConfDir: NetworkPluginMaxConfNum:0 Snapshotter: SandboxMode:podsandbox}] NoPivot:false DisableSnapshotAnnotations:true DiscardUnpackedLayers:true IgnoreBlockIONotEnabledErrors:false IgnoreRdtNotEnabledErrors:false} CniConfig:{NetworkPluginBinDir:/opt/cni/bin NetworkPluginConfDir:/etc/cni/net.d NetworkPluginMaxConfNum:1 NetworkPluginSetupSerially:false NetworkPluginConfTemplate: IPPreference:} Registry:{ConfigPath:/etc/containerd/certs.d Mirrors:map[] Configs:map[] Auths:map[] Headers:map[]} ImageDecryption:{KeyModel:node} DisableTCPService:true StreamServerAddress: StreamServerPort:10010 StreamIdleTimeout:4h0m0s EnableSelinux:false SelinuxCategoryRange:1024 SandboxImage:registry.k8s.io/pause:3.10 StatsCollectPeriod:10 SystemdCgroup:false EnableTLSStreaming:false X509KeyPairStreaming:{TLSCertFile: TLSKeyFile:} MaxContainerLogLineSize:16384 DisableCgroup:false DisableApparmor:false RestrictOOMScoreAdj:false MaxConcurrentDownloads:3 DisableProcMount:false UnsetSeccompProfile: TolerateMissing
HugetlbController:true DisableHugetlbController:true DeviceOwnershipFromSecurityContext:false IgnoreImageDefinedVolumes:false NetNSMountsUnderStateDir:false EnableUnprivilegedPorts:true EnableUnprivilegedICMP:false EnableCDI:false CDISpecDirs:[/etc/cdi /var/run/cdi] ImagePullProgressTimeout:5m0s DrainExecSyncIOTimeout:0s ImagePullWithSyncFs:false IgnoreDeprecationWarnings:[]} ContainerdRootDir:/var/lib/containerd ContainerdEndpoint:/run/containerd/containerd.sock RootDir:/var/lib/containerd/io.containerd.grpc.v1.cri StateDir:/run/containerd/io.containerd.grpc.v1.cri}"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319559044Z" level=info msg="Connect containerd service"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319595951Z" level=info msg="using legacy CRI server"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319604517Z" level=info msg="using experimental NRI integration - disable nri plugin to prevent this"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319695421Z" level=info msg="Get image filesystem path \"/var/lib/containerd/io.containerd.snapshotter.v1.overlayfs\""
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.320725355Z" level=info msg=serving... address=/run/containerd/containerd.sock.ttrpc
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.320786015Z" level=info msg=serving... address=/run/containerd/containerd.sock
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.321270680Z" level=info msg="Start subscribing containerd event"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.321325367Z" level=info msg="Start recovering state"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.416492174Z" level=info msg="Start event monitor"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.416555681Z" level=info msg="Start snapshots syncer"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.416569178Z" level=info msg="Start cni network conf syncer for default"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.416579574Z" level=info msg="Start streaming server"
	Sep 16 10:48:02 functional-911502 systemd[1]: Started containerd container runtime.
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.422346988Z" level=info msg="containerd successfully booted in 0.264155s"
	
	
	==> coredns [11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39497 - 35637 "HINFO IN 756688810597303784.5152931065563193714. udp 56 false 512" NXDOMAIN qr,rd,ra 56 0.040061481s
	
	
	==> describe nodes <==
	Name:               functional-911502
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-911502
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-911502
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_40_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:47:36 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-911502
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:47:59 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:47:49 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:47:49 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:47:49 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:47:49 +0000   Mon, 16 Sep 2024 10:47:36 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-911502
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 43a21049ac0d40628479cf884a8089e0
	  System UUID:                2830f6a5-4b63-46c5-b24a-468e4df19b79
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-6kw9d                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     22s
	  kube-system                 etcd-functional-911502                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         29s
	  kube-system                 kindnet-7r2rg                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      22s
	  kube-system                 kube-apiserver-functional-911502             250m (12%)    0 (0%)      0 (0%)           0 (0%)         27s
	  kube-system                 kube-controller-manager-functional-911502    200m (10%)    0 (0%)      0 (0%)           0 (0%)         27s
	  kube-system                 kube-proxy-l59dx                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         22s
	  kube-system                 kube-scheduler-functional-911502             100m (5%)     0 (0%)      0 (0%)           0 (0%)         27s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         22s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age   From             Message
	  ----     ------                   ----  ----             -------
	  Normal   Starting                 21s   kube-proxy       
	  Normal   Starting                 27s   kubelet          Starting kubelet.
	  Warning  CgroupV1                 27s   kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  27s   kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  27s   kubelet          Node functional-911502 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    27s   kubelet          Node functional-911502 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     27s   kubelet          Node functional-911502 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           23s   node-controller  Node functional-911502 event: Registered Node functional-911502 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31] <==
	{"level":"info","ts":"2024-09-16T10:47:33.131560Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:47:33.139317Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:47:33.139485Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:47:33.138740Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:47:33.139730Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:47:33.254739Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T10:47:33.254930Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T10:47:33.255031Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 1"}
	{"level":"info","ts":"2024-09-16T10:47:33.255134Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.255208Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.255286Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.255359Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.262769Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.270884Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-911502 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:47:33.271109Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.271402Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.272620Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.283805Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.271431Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284172Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284977Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.289591Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.306735Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306879Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306916Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	
	
	==> kernel <==
	 10:48:06 up 1 day, 14:30,  0 users,  load average: 2.58, 1.30, 1.39
	Linux functional-911502 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b] <==
	I0916 10:47:45.041351       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:47:45.041663       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:47:45.041804       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:47:45.041819       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:47:45.041830       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:47:45.520963       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:47:45.521152       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:47:45.521205       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:47:45.722171       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:47:45.722199       1 metrics.go:61] Registering metrics
	I0916 10:47:45.722266       1 controller.go:374] Syncing nftables rules
	I0916 10:47:55.524822       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:47:55.524859       1 main.go:299] handling current node
	I0916 10:48:05.526968       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:05.527019       1 main.go:299] handling current node
	
	
	==> kube-apiserver [a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19] <==
	I0916 10:47:36.526042       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:47:36.526212       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	E0916 10:47:36.567945       1 controller.go:145] "Failed to ensure lease exists, will retry" err="namespaces \"kube-system\" not found" interval="200ms"
	E0916 10:47:36.572000       1 controller.go:148] "Unhandled Error" err="while syncing ConfigMap \"kube-system/kube-apiserver-legacy-service-account-token-tracking\", err: namespaces \"kube-system\" not found" logger="UnhandledError"
	I0916 10:47:36.577644       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:47:36.580467       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:47:36.580621       1 policy_source.go:224] refreshing policies
	I0916 10:47:36.603683       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 10:47:36.607429       1 controller.go:615] quota admission added evaluator for: namespaces
	I0916 10:47:36.774829       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:47:37.212486       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0916 10:47:37.220769       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0916 10:47:37.221471       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 10:47:37.822723       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:47:37.869935       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 10:47:38.023960       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 10:47:38.032609       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2]
	I0916 10:47:38.034010       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:47:38.039301       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 10:47:38.366073       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:47:39.181277       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:47:39.195876       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 10:47:39.209196       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:47:43.921976       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0916 10:47:44.089159       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	
	
	==> kube-controller-manager [928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a] <==
	I0916 10:47:43.260717       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:47:43.260741       1 shared_informer.go:320] Caches are synced for legacy-service-account-token-cleaner
	I0916 10:47:43.260789       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 10:47:43.264947       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:47:43.270925       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:43.274016       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:47:43.289621       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 10:47:43.309403       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:47:43.755815       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810781       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810815       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:47:43.822097       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:44.310052       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="384.204702ms"
	I0916 10:47:44.362819       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="52.71544ms"
	I0916 10:47:44.426732       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="63.859952ms"
	I0916 10:47:44.459758       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="32.950043ms"
	I0916 10:47:44.479101       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="19.296296ms"
	I0916 10:47:44.479182       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.386µs"
	I0916 10:47:46.277218       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="54.999µs"
	I0916 10:47:46.284221       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="47.499µs"
	I0916 10:47:46.289165       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.016µs"
	I0916 10:47:49.776695       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:56.302801       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="57.165µs"
	I0916 10:47:57.316220       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="18.588165ms"
	I0916 10:47:57.316368       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="102.169µs"
	
	
	==> kube-proxy [57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6] <==
	I0916 10:47:44.794046       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:47:44.895525       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:47:44.895614       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:47:44.916570       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:47:44.916637       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:47:44.918597       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:47:44.919425       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:47:44.919452       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:47:44.925419       1 config.go:199] "Starting service config controller"
	I0916 10:47:44.925472       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:47:44.925521       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:47:44.925531       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:47:44.928903       1 config.go:328] "Starting node config controller"
	I0916 10:47:44.928927       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:47:45.030179       1 shared_informer.go:320] Caches are synced for node config
	I0916 10:47:45.030253       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:47:45.030287       1 shared_informer.go:320] Caches are synced for endpoint slice config
	
	
	==> kube-scheduler [31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44] <==
	W0916 10:47:37.419477       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0916 10:47:37.419493       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419534       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:47:37.419548       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419590       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 10:47:37.419607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419645       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419660       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419704       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419719       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422428       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:47:37.422472       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422530       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:47:37.422547       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422779       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 10:47:37.422805       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	W0916 10:47:37.422862       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:47:37.422883       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424481       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.424516       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424588       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:47:37.424607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424692       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:47:37.424724       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	I0916 10:47:38.910602       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 10:47:44 functional-911502 kubelet[1540]: I0916 10:47:44.361582    1540 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 10:47:44 functional-911502 kubelet[1540]: E0916 10:47:44.392120    1540 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config-volume kube-api-access-qfzm8], unattached volumes=[], failed to process volumes=[]: context canceled" pod="kube-system/coredns-7c65d6cfc9-ks82c" podUID="0dc23912-43e2-42db-a49b-97f879c4f7b3"
	Sep 16 10:47:44 functional-911502 kubelet[1540]: I0916 10:47:44.729530    1540 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dzjt\" (UniqueName: \"kubernetes.io/projected/ecac562d-8318-4226-b5f1-61f2c76bb51b-kube-api-access-6dzjt\") pod \"storage-provisioner\" (UID: \"ecac562d-8318-4226-b5f1-61f2c76bb51b\") " pod="kube-system/storage-provisioner"
	Sep 16 10:47:44 functional-911502 kubelet[1540]: I0916 10:47:44.729588    1540 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/ecac562d-8318-4226-b5f1-61f2c76bb51b-tmp\") pod \"storage-provisioner\" (UID: \"ecac562d-8318-4226-b5f1-61f2c76bb51b\") " pod="kube-system/storage-provisioner"
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.289061    1540 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kindnet-7r2rg" podStartSLOduration=1.289039949 podStartE2EDuration="1.289039949s" podCreationTimestamp="2024-09-16 10:47:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:47:45.250464079 +0000 UTC m=+6.287018312" watchObservedRunningTime="2024-09-16 10:47:45.289039949 +0000 UTC m=+6.325594190"
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.289338    1540 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-proxy-l59dx" podStartSLOduration=1.289330753 podStartE2EDuration="1.289330753s" podCreationTimestamp="2024-09-16 10:47:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:47:45.288652505 +0000 UTC m=+6.325206746" watchObservedRunningTime="2024-09-16 10:47:45.289330753 +0000 UTC m=+6.325884994"
	Sep 16 10:47:45 functional-911502 kubelet[1540]: E0916 10:47:45.327027    1540 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\": failed to find network info for sandbox \"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\""
	Sep 16 10:47:45 functional-911502 kubelet[1540]: E0916 10:47:45.327169    1540 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\": failed to find network info for sandbox \"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\"" pod="kube-system/coredns-7c65d6cfc9-6kw9d"
	Sep 16 10:47:45 functional-911502 kubelet[1540]: E0916 10:47:45.327193    1540 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\": failed to find network info for sandbox \"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\"" pod="kube-system/coredns-7c65d6cfc9-6kw9d"
	Sep 16 10:47:45 functional-911502 kubelet[1540]: E0916 10:47:45.327263    1540 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-7c65d6cfc9-6kw9d_kube-system(072167c7-fa1a-463e-a957-91ea24020387)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-7c65d6cfc9-6kw9d_kube-system(072167c7-fa1a-463e-a957-91ea24020387)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\\\": failed to find network info for sandbox \\\"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\\\"\"" pod="kube-system/coredns-7c65d6cfc9-6kw9d" podUID="072167c7-fa1a-463e-a957-91ea24020387"
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.341081    1540 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfzm8\" (UniqueName: \"kubernetes.io/projected/0dc23912-43e2-42db-a49b-97f879c4f7b3-kube-api-access-qfzm8\") pod \"0dc23912-43e2-42db-a49b-97f879c4f7b3\" (UID: \"0dc23912-43e2-42db-a49b-97f879c4f7b3\") "
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.341496    1540 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0dc23912-43e2-42db-a49b-97f879c4f7b3-config-volume\") pod \"0dc23912-43e2-42db-a49b-97f879c4f7b3\" (UID: \"0dc23912-43e2-42db-a49b-97f879c4f7b3\") "
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.345768    1540 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0dc23912-43e2-42db-a49b-97f879c4f7b3-config-volume" (OuterVolumeSpecName: "config-volume") pod "0dc23912-43e2-42db-a49b-97f879c4f7b3" (UID: "0dc23912-43e2-42db-a49b-97f879c4f7b3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.347205    1540 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dc23912-43e2-42db-a49b-97f879c4f7b3-kube-api-access-qfzm8" (OuterVolumeSpecName: "kube-api-access-qfzm8") pod "0dc23912-43e2-42db-a49b-97f879c4f7b3" (UID: "0dc23912-43e2-42db-a49b-97f879c4f7b3"). InnerVolumeSpecName "kube-api-access-qfzm8". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.442441    1540 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-qfzm8\" (UniqueName: \"kubernetes.io/projected/0dc23912-43e2-42db-a49b-97f879c4f7b3-kube-api-access-qfzm8\") on node \"functional-911502\" DevicePath \"\""
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.442490    1540 reconciler_common.go:288] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0dc23912-43e2-42db-a49b-97f879c4f7b3-config-volume\") on node \"functional-911502\" DevicePath \"\""
	Sep 16 10:47:47 functional-911502 kubelet[1540]: I0916 10:47:47.080115    1540 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dc23912-43e2-42db-a49b-97f879c4f7b3" path="/var/lib/kubelet/pods/0dc23912-43e2-42db-a49b-97f879c4f7b3/volumes"
	Sep 16 10:47:47 functional-911502 kubelet[1540]: I0916 10:47:47.380422    1540 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/storage-provisioner" podStartSLOduration=3.380393704 podStartE2EDuration="3.380393704s" podCreationTimestamp="2024-09-16 10:47:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:47:46.302196758 +0000 UTC m=+7.338751015" watchObservedRunningTime="2024-09-16 10:47:47.380393704 +0000 UTC m=+8.416947937"
	Sep 16 10:47:49 functional-911502 kubelet[1540]: I0916 10:47:49.759905    1540 kuberuntime_manager.go:1635] "Updating runtime config through cri with podcidr" CIDR="10.244.0.0/24"
	Sep 16 10:47:49 functional-911502 kubelet[1540]: I0916 10:47:49.761214    1540 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Sep 16 10:47:57 functional-911502 kubelet[1540]: I0916 10:47:57.295790    1540 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/coredns-7c65d6cfc9-6kw9d" podStartSLOduration=13.295770937 podStartE2EDuration="13.295770937s" podCreationTimestamp="2024-09-16 10:47:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:47:56.309986989 +0000 UTC m=+17.346541230" watchObservedRunningTime="2024-09-16 10:47:57.295770937 +0000 UTC m=+18.332325178"
	Sep 16 10:48:02 functional-911502 kubelet[1540]: W0916 10:48:02.313775    1540 logging.go:55] [core] [Channel #1 SubChannel #2]grpc: addrConn.createTransport failed to connect to {Addr: "/run/containerd/containerd.sock", ServerName: "localhost", }. Err: connection error: desc = "transport: Error while dialing: dial unix /run/containerd/containerd.sock: connect: no such file or directory"
	Sep 16 10:48:02 functional-911502 kubelet[1540]: E0916 10:48:02.313866    1540 log.go:32] "ListPodSandbox with filter from runtime service failed" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial unix /run/containerd/containerd.sock: connect: no such file or directory\"" filter="nil"
	Sep 16 10:48:02 functional-911502 kubelet[1540]: E0916 10:48:02.313905    1540 kuberuntime_sandbox.go:305] "Failed to list pod sandboxes" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial unix /run/containerd/containerd.sock: connect: no such file or directory\""
	Sep 16 10:48:02 functional-911502 kubelet[1540]: E0916 10:48:02.313919    1540 generic.go:238] "GenericPLEG: Unable to retrieve pods" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial unix /run/containerd/containerd.sock: connect: no such file or directory\""
	
	
	==> storage-provisioner [8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27] <==
	I0916 10:47:45.429681       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:47:45.445556       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:47:45.446351       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:47:45.454850       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:47:45.455177       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-911502_5b879e6d-55a8-4fac-8967-8695891e6ebb!
	I0916 10:47:45.456424       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"28a802e4-0156-4c92-adef-4d6f2592a206", APIVersion:"v1", ResourceVersion:"387", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-911502_5b879e6d-55a8-4fac-8967-8695891e6ebb became leader
	I0916 10:47:45.555346       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-911502_5b879e6d-55a8-4fac-8967-8695891e6ebb!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-911502 -n functional-911502
helpers_test.go:261: (dbg) Run:  kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (591.504µs)
helpers_test.go:263: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/serial/KubeContext (2.20s)

                                                
                                    
x
+
TestFunctional/serial/KubectlGetPods (2.25s)

                                                
                                                
=== RUN   TestFunctional/serial/KubectlGetPods
functional_test.go:696: (dbg) Run:  kubectl --context functional-911502 get po -A
functional_test.go:696: (dbg) Non-zero exit: kubectl --context functional-911502 get po -A: fork/exec /usr/local/bin/kubectl: exec format error (439.497µs)
functional_test.go:698: failed to get kubectl pods: args "kubectl --context functional-911502 get po -A" : fork/exec /usr/local/bin/kubectl: exec format error
functional_test.go:705: expected stdout to include *kube-system* but got *""*. args: "kubectl --context functional-911502 get po -A"
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/serial/KubectlGetPods]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-911502
helpers_test.go:235: (dbg) docker inspect functional-911502:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1",
	        "Created": "2024-09-16T10:47:14.597354828Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2085675,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:47:14.7319597Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hostname",
	        "HostsPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hosts",
	        "LogPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1-json.log",
	        "Name": "/functional-911502",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-911502:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-911502",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/merged",
	                "UpperDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/diff",
	                "WorkDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "functional-911502",
	                "Source": "/var/lib/docker/volumes/functional-911502/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-911502",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-911502",
	                "name.minikube.sigs.k8s.io": "functional-911502",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "44bb4c6f2ec0f0eef04adb8f886d0e0de7d31ae50612de741bed0ee945b2b75e",
	            "SandboxKey": "/var/run/docker/netns/44bb4c6f2ec0",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40592"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40593"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40596"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40594"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40595"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-911502": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "8c4428adf23c812318456ac17bea5953b33d7961994dfc84c0ff82a45764b662",
	                    "EndpointID": "8b3cc6f2c9f87b61b7e755d7ecd320ed6313887dfb3deab9f4e0858aa1c9fe80",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-911502",
	                        "9bf795605895"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-911502 -n functional-911502
helpers_test.go:244: <<< TestFunctional/serial/KubectlGetPods FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/serial/KubectlGetPods]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 logs -n 25: (1.366942514s)
helpers_test.go:252: TestFunctional/serial/KubectlGetPods logs: 
-- stdout --
	
	==> Audit <==
	|------------|--------------------------------|-------------------|---------|---------|---------------------|---------------------|
	|  Command   |              Args              |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|------------|--------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| addons     | addons-451841 addons           | addons-451841     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|            | disable metrics-server         |                   |         |         |                     |                     |
	|            | --alsologtostderr -v=1         |                   |         |         |                     |                     |
	| stop       | -p addons-451841               | addons-451841     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	| addons     | enable dashboard -p            | addons-451841     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|            | addons-451841                  |                   |         |         |                     |                     |
	| addons     | disable dashboard -p           | addons-451841     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|            | addons-451841                  |                   |         |         |                     |                     |
	| addons     | disable gvisor -p              | addons-451841     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	|            | addons-451841                  |                   |         |         |                     |                     |
	| delete     | -p addons-451841               | addons-451841     | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:45 UTC |
	| start      | -p dockerenv-701218            | dockerenv-701218  | jenkins | v1.34.0 | 16 Sep 24 10:45 UTC | 16 Sep 24 10:46 UTC |
	|            | --driver=docker                |                   |         |         |                     |                     |
	|            | --container-runtime=containerd |                   |         |         |                     |                     |
	| docker-env | --ssh-host --ssh-add -p        | dockerenv-701218  | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|            | dockerenv-701218               |                   |         |         |                     |                     |
	| delete     | -p dockerenv-701218            | dockerenv-701218  | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	| start      | -p nospam-826306 -n=1          | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|            | --memory=2250 --wait=false     |                   |         |         |                     |                     |
	|            | --log_dir=/tmp/nospam-826306   |                   |         |         |                     |                     |
	|            | --driver=docker                |                   |         |         |                     |                     |
	|            | --container-runtime=containerd |                   |         |         |                     |                     |
	| start      | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC |                     |
	|            | /tmp/nospam-826306 start       |                   |         |         |                     |                     |
	|            | --dry-run                      |                   |         |         |                     |                     |
	| start      | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC |                     |
	|            | /tmp/nospam-826306 start       |                   |         |         |                     |                     |
	|            | --dry-run                      |                   |         |         |                     |                     |
	| start      | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC |                     |
	|            | /tmp/nospam-826306 start       |                   |         |         |                     |                     |
	|            | --dry-run                      |                   |         |         |                     |                     |
	| pause      | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:46 UTC |
	|            | /tmp/nospam-826306 pause       |                   |         |         |                     |                     |
	| pause      | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:46 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 pause       |                   |         |         |                     |                     |
	| pause      | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 pause       |                   |         |         |                     |                     |
	| unpause    | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 unpause     |                   |         |         |                     |                     |
	| unpause    | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 unpause     |                   |         |         |                     |                     |
	| unpause    | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 unpause     |                   |         |         |                     |                     |
	| stop       | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 stop        |                   |         |         |                     |                     |
	| stop       | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 stop        |                   |         |         |                     |                     |
	| stop       | nospam-826306 --log_dir        | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | /tmp/nospam-826306 stop        |                   |         |         |                     |                     |
	| delete     | -p nospam-826306               | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	| start      | -p functional-911502           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|            | --memory=4000                  |                   |         |         |                     |                     |
	|            | --apiserver-port=8441          |                   |         |         |                     |                     |
	|            | --wait=all --driver=docker     |                   |         |         |                     |                     |
	|            | --container-runtime=containerd |                   |         |         |                     |                     |
	| start      | -p functional-911502           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:48 UTC |
	|            | --alsologtostderr -v=8         |                   |         |         |                     |                     |
	|------------|--------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:47:58
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:47:58.785399 2088165 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:47:58.785661 2088165 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:47:58.785692 2088165 out.go:358] Setting ErrFile to fd 2...
	I0916 10:47:58.785711 2088165 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:47:58.786080 2088165 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:47:58.786528 2088165 out.go:352] Setting JSON to false
	I0916 10:47:58.787669 2088165 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":138621,"bootTime":1726345058,"procs":201,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:47:58.787775 2088165 start.go:139] virtualization:  
	I0916 10:47:58.790932 2088165 out.go:177] * [functional-911502] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:47:58.793408 2088165 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:47:58.793528 2088165 notify.go:220] Checking for updates...
	I0916 10:47:58.797761 2088165 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:47:58.799713 2088165 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:47:58.801637 2088165 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:47:58.804187 2088165 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:47:58.806963 2088165 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:47:58.809598 2088165 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:47:58.809697 2088165 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:47:58.832380 2088165 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:47:58.832507 2088165 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:47:58.895646 2088165 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:52 OomKillDisable:true NGoroutines:71 SystemTime:2024-09-16 10:47:58.885470517 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:47:58.895754 2088165 docker.go:318] overlay module found
	I0916 10:47:58.899884 2088165 out.go:177] * Using the docker driver based on existing profile
	I0916 10:47:58.902322 2088165 start.go:297] selected driver: docker
	I0916 10:47:58.902344 2088165 start.go:901] validating driver "docker" against &{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountU
ID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:47:58.902463 2088165 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:47:58.902578 2088165 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:47:58.953781 2088165 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:52 OomKillDisable:true NGoroutines:71 SystemTime:2024-09-16 10:47:58.94381861 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarc
h64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerError
s:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:47:58.954212 2088165 cni.go:84] Creating CNI manager for ""
	I0916 10:47:58.954285 2088165 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:47:58.954337 2088165 start.go:340] cluster config:
	{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local Containe
rRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQe
muFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:47:58.956566 2088165 out.go:177] * Starting "functional-911502" primary control-plane node in "functional-911502" cluster
	I0916 10:47:58.958887 2088165 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:47:58.961214 2088165 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:47:58.963438 2088165 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:47:58.963795 2088165 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:47:58.963796 2088165 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:47:58.963825 2088165 cache.go:56] Caching tarball of preloaded images
	I0916 10:47:58.963907 2088165 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:47:58.963916 2088165 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:47:58.964022 2088165 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/config.json ...
	W0916 10:47:58.982466 2088165 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:47:58.982488 2088165 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:47:58.982561 2088165 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:47:58.982583 2088165 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:47:58.982591 2088165 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:47:58.982599 2088165 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:47:58.982609 2088165 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:47:58.983989 2088165 image.go:273] response: 
	I0916 10:47:59.121036 2088165 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:47:59.121078 2088165 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:47:59.121125 2088165 start.go:360] acquireMachinesLock for functional-911502: {Name:mk182321dd921c9bc14d73d2af41d001efc879fd Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:47:59.121202 2088165 start.go:364] duration metric: took 53.079µs to acquireMachinesLock for "functional-911502"
	I0916 10:47:59.121226 2088165 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:47:59.121232 2088165 fix.go:54] fixHost starting: 
	I0916 10:47:59.121521 2088165 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:47:59.141783 2088165 fix.go:112] recreateIfNeeded on functional-911502: state=Running err=<nil>
	W0916 10:47:59.141816 2088165 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:47:59.144721 2088165 out.go:177] * Updating the running docker "functional-911502" container ...
	I0916 10:47:59.146980 2088165 machine.go:93] provisionDockerMachine start ...
	I0916 10:47:59.147079 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:47:59.164690 2088165 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:59.164967 2088165 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:47:59.164983 2088165 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:47:59.302200 2088165 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-911502
	
	I0916 10:47:59.302234 2088165 ubuntu.go:169] provisioning hostname "functional-911502"
	I0916 10:47:59.302315 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:47:59.319540 2088165 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:59.319848 2088165 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:47:59.319864 2088165 main.go:141] libmachine: About to run SSH command:
	sudo hostname functional-911502 && echo "functional-911502" | sudo tee /etc/hostname
	I0916 10:47:59.483029 2088165 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-911502
	
	I0916 10:47:59.483111 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:47:59.501717 2088165 main.go:141] libmachine: Using SSH client type: native
	I0916 10:47:59.501970 2088165 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:47:59.501994 2088165 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sfunctional-911502' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 functional-911502/g' /etc/hosts;
				else 
					echo '127.0.1.1 functional-911502' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:47:59.638820 2088165 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:47:59.638848 2088165 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:47:59.638878 2088165 ubuntu.go:177] setting up certificates
	I0916 10:47:59.638887 2088165 provision.go:84] configureAuth start
	I0916 10:47:59.638945 2088165 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-911502
	I0916 10:47:59.654991 2088165 provision.go:143] copyHostCerts
	I0916 10:47:59.655034 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:47:59.655068 2088165 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:47:59.655080 2088165 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:47:59.655162 2088165 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:47:59.655296 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:47:59.655319 2088165 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:47:59.655328 2088165 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:47:59.655365 2088165 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:47:59.655419 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:47:59.655439 2088165 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:47:59.655446 2088165 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:47:59.655472 2088165 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:47:59.655531 2088165 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.functional-911502 san=[127.0.0.1 192.168.49.2 functional-911502 localhost minikube]
	I0916 10:48:00.409717 2088165 provision.go:177] copyRemoteCerts
	I0916 10:48:00.409931 2088165 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:48:00.410014 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:00.446778 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:00.562791 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:48:00.562870 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:48:00.594618 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:48:00.594736 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1220 bytes)
	I0916 10:48:00.624272 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:48:00.624360 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:48:00.651947 2088165 provision.go:87] duration metric: took 1.013043969s to configureAuth
	I0916 10:48:00.651976 2088165 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:48:00.652177 2088165 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:48:00.652190 2088165 machine.go:96] duration metric: took 1.505191275s to provisionDockerMachine
	I0916 10:48:00.652199 2088165 start.go:293] postStartSetup for "functional-911502" (driver="docker")
	I0916 10:48:00.652211 2088165 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:48:00.652275 2088165 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:48:00.652323 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:00.669602 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:00.768149 2088165 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:48:00.771695 2088165 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 10:48:00.771754 2088165 command_runner.go:130] > NAME="Ubuntu"
	I0916 10:48:00.771777 2088165 command_runner.go:130] > VERSION_ID="22.04"
	I0916 10:48:00.771789 2088165 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 10:48:00.771795 2088165 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 10:48:00.771799 2088165 command_runner.go:130] > ID=ubuntu
	I0916 10:48:00.771803 2088165 command_runner.go:130] > ID_LIKE=debian
	I0916 10:48:00.771807 2088165 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 10:48:00.771812 2088165 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 10:48:00.771818 2088165 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 10:48:00.771827 2088165 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 10:48:00.771832 2088165 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 10:48:00.771885 2088165 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:48:00.771925 2088165 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:48:00.771940 2088165 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:48:00.771947 2088165 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:48:00.771969 2088165 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:48:00.772032 2088165 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:48:00.772125 2088165 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:48:00.772139 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:48:00.772219 2088165 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/test/nested/copy/2063326/hosts -> hosts in /etc/test/nested/copy/2063326
	I0916 10:48:00.772223 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/test/nested/copy/2063326/hosts -> /etc/test/nested/copy/2063326/hosts
	I0916 10:48:00.772270 2088165 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs /etc/test/nested/copy/2063326
	I0916 10:48:00.781368 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:48:00.806768 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/test/nested/copy/2063326/hosts --> /etc/test/nested/copy/2063326/hosts (40 bytes)
	I0916 10:48:00.831874 2088165 start.go:296] duration metric: took 179.657184ms for postStartSetup
	I0916 10:48:00.832033 2088165 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:48:00.832109 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:00.849667 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:00.947508 2088165 command_runner.go:130] > 21%
	I0916 10:48:00.948282 2088165 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:48:00.953002 2088165 command_runner.go:130] > 155G
	I0916 10:48:00.953615 2088165 fix.go:56] duration metric: took 1.832376025s for fixHost
	I0916 10:48:00.953635 2088165 start.go:83] releasing machines lock for "functional-911502", held for 1.832420973s
	I0916 10:48:00.953725 2088165 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-911502
	I0916 10:48:00.971275 2088165 ssh_runner.go:195] Run: cat /version.json
	I0916 10:48:00.971332 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:00.971591 2088165 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:48:00.971655 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:00.991183 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:01.008321 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:01.090086 2088165 command_runner.go:130] > {"iso_version": "v1.34.0-1726281733-19643", "kicbase_version": "v0.0.45-1726358845-19644", "minikube_version": "v1.34.0", "commit": "f890713149c79cf50e25c13e6a5c0470aa0f0450"}
	I0916 10:48:01.090258 2088165 ssh_runner.go:195] Run: systemctl --version
	I0916 10:48:01.217382 2088165 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 10:48:01.220858 2088165 command_runner.go:130] > systemd 249 (249.11-0ubuntu3.12)
	I0916 10:48:01.220957 2088165 command_runner.go:130] > +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified
	I0916 10:48:01.221042 2088165 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:48:01.225330 2088165 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 10:48:01.225410 2088165 command_runner.go:130] >   Size: 78        	Blocks: 8          IO Block: 4096   regular file
	I0916 10:48:01.225431 2088165 command_runner.go:130] > Device: 3ch/60d	Inode: 1324618     Links: 1
	I0916 10:48:01.225445 2088165 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 10:48:01.225451 2088165 command_runner.go:130] > Access: 2024-09-16 10:47:17.590968226 +0000
	I0916 10:48:01.225457 2088165 command_runner.go:130] > Modify: 2024-09-16 10:47:17.562968404 +0000
	I0916 10:48:01.225475 2088165 command_runner.go:130] > Change: 2024-09-16 10:47:17.562968404 +0000
	I0916 10:48:01.225491 2088165 command_runner.go:130] >  Birth: 2024-09-16 10:47:17.562968404 +0000
	I0916 10:48:01.225749 2088165 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:48:01.245017 2088165 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:48:01.245154 2088165 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:48:01.254936 2088165 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:48:01.254965 2088165 start.go:495] detecting cgroup driver to use...
	I0916 10:48:01.255025 2088165 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:48:01.255096 2088165 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:48:01.268726 2088165 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:48:01.281208 2088165 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:48:01.281273 2088165 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:48:01.295687 2088165 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:48:01.308064 2088165 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:48:01.417725 2088165 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:48:01.540113 2088165 docker.go:233] disabling docker service ...
	I0916 10:48:01.540218 2088165 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:48:01.554507 2088165 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:48:01.567763 2088165 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:48:01.684493 2088165 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:48:01.820121 2088165 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:48:01.834026 2088165 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:48:01.852747 2088165 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0916 10:48:01.854272 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:48:01.866074 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:48:01.877013 2088165 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:48:01.877089 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:48:01.887138 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:48:01.898151 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:48:01.908730 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:48:01.919968 2088165 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:48:01.929885 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:48:01.940722 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:48:01.951693 2088165 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:48:01.962633 2088165 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:48:01.971719 2088165 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 10:48:01.973238 2088165 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:48:01.983217 2088165 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:02.116216 2088165 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:48:02.423701 2088165 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:48:02.423774 2088165 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:48:02.427450 2088165 command_runner.go:130] >   File: /run/containerd/containerd.sock
	I0916 10:48:02.427474 2088165 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 10:48:02.427487 2088165 command_runner.go:130] > Device: 45h/69d	Inode: 620         Links: 1
	I0916 10:48:02.427494 2088165 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 10:48:02.427502 2088165 command_runner.go:130] > Access: 2024-09-16 10:48:02.406683973 +0000
	I0916 10:48:02.427507 2088165 command_runner.go:130] > Modify: 2024-09-16 10:48:02.314684556 +0000
	I0916 10:48:02.427513 2088165 command_runner.go:130] > Change: 2024-09-16 10:48:02.314684556 +0000
	I0916 10:48:02.427517 2088165 command_runner.go:130] >  Birth: -
	I0916 10:48:02.427535 2088165 start.go:563] Will wait 60s for crictl version
	I0916 10:48:02.427599 2088165 ssh_runner.go:195] Run: which crictl
	I0916 10:48:02.430997 2088165 command_runner.go:130] > /usr/bin/crictl
	I0916 10:48:02.431080 2088165 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:48:02.471685 2088165 command_runner.go:130] > Version:  0.1.0
	I0916 10:48:02.471711 2088165 command_runner.go:130] > RuntimeName:  containerd
	I0916 10:48:02.471725 2088165 command_runner.go:130] > RuntimeVersion:  1.7.22
	I0916 10:48:02.471730 2088165 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 10:48:02.474385 2088165 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:48:02.474457 2088165 ssh_runner.go:195] Run: containerd --version
	I0916 10:48:02.501051 2088165 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 10:48:02.502956 2088165 ssh_runner.go:195] Run: containerd --version
	I0916 10:48:02.529886 2088165 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 10:48:02.535085 2088165 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:48:02.537930 2088165 cli_runner.go:164] Run: docker network inspect functional-911502 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:48:02.552276 2088165 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:48:02.556374 2088165 command_runner.go:130] > 192.168.49.1	host.minikube.internal
	I0916 10:48:02.556587 2088165 kubeadm.go:883] updating cluster {Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA API
ServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryM
irror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:48:02.556724 2088165 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:48:02.556849 2088165 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:02.594114 2088165 command_runner.go:130] > {
	I0916 10:48:02.594134 2088165 command_runner.go:130] >   "images": [
	I0916 10:48:02.594141 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594151 2088165 command_runner.go:130] >       "id": "sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 10:48:02.594156 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594163 2088165 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 10:48:02.594167 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594171 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594180 2088165 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 10:48:02.594183 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594187 2088165 command_runner.go:130] >       "size": "33309097",
	I0916 10:48:02.594195 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.594198 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594204 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594208 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594237 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594240 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594249 2088165 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 10:48:02.594255 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594260 2088165 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 10:48:02.594266 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594270 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594279 2088165 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 10:48:02.594282 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594286 2088165 command_runner.go:130] >       "size": "8034419",
	I0916 10:48:02.594289 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.594294 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594298 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594305 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594309 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594312 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594322 2088165 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 10:48:02.594326 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594335 2088165 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 10:48:02.594346 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594350 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594358 2088165 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 10:48:02.594362 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594366 2088165 command_runner.go:130] >       "size": "16948420",
	I0916 10:48:02.594372 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.594376 2088165 command_runner.go:130] >       "username": "nonroot",
	I0916 10:48:02.594382 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594388 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594391 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594397 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594403 2088165 command_runner.go:130] >       "id": "sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 10:48:02.594411 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594417 2088165 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 10:48:02.594420 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594425 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594435 2088165 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a"
	I0916 10:48:02.594441 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594445 2088165 command_runner.go:130] >       "size": "66535646",
	I0916 10:48:02.594449 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.594453 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.594456 2088165 command_runner.go:130] >       },
	I0916 10:48:02.594460 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594464 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594470 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594474 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594479 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594487 2088165 command_runner.go:130] >       "id": "sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 10:48:02.594492 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594498 2088165 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 10:48:02.594504 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594508 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594519 2088165 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb"
	I0916 10:48:02.594523 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594529 2088165 command_runner.go:130] >       "size": "25687130",
	I0916 10:48:02.594533 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.594537 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.594541 2088165 command_runner.go:130] >       },
	I0916 10:48:02.594545 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594551 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594555 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594563 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594566 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594573 2088165 command_runner.go:130] >       "id": "sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 10:48:02.594579 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594586 2088165 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 10:48:02.594589 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594593 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594602 2088165 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1"
	I0916 10:48:02.594608 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594612 2088165 command_runner.go:130] >       "size": "23948670",
	I0916 10:48:02.594616 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.594619 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.594623 2088165 command_runner.go:130] >       },
	I0916 10:48:02.594626 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594630 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594633 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594636 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594639 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594646 2088165 command_runner.go:130] >       "id": "sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 10:48:02.594650 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594655 2088165 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 10:48:02.594660 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594664 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594708 2088165 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44"
	I0916 10:48:02.594716 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594720 2088165 command_runner.go:130] >       "size": "26756812",
	I0916 10:48:02.594724 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.594727 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594731 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594735 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594738 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594741 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594749 2088165 command_runner.go:130] >       "id": "sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 10:48:02.594755 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594761 2088165 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 10:48:02.594764 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594768 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594780 2088165 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 10:48:02.594785 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594789 2088165 command_runner.go:130] >       "size": "18507674",
	I0916 10:48:02.594793 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.594797 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.594803 2088165 command_runner.go:130] >       },
	I0916 10:48:02.594806 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594813 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594817 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.594820 2088165 command_runner.go:130] >     },
	I0916 10:48:02.594824 2088165 command_runner.go:130] >     {
	I0916 10:48:02.594831 2088165 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 10:48:02.594837 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.594842 2088165 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 10:48:02.594845 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594851 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.594859 2088165 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 10:48:02.594865 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.594869 2088165 command_runner.go:130] >       "size": "267933",
	I0916 10:48:02.594873 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.594877 2088165 command_runner.go:130] >         "value": "65535"
	I0916 10:48:02.594882 2088165 command_runner.go:130] >       },
	I0916 10:48:02.594886 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.594892 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.594896 2088165 command_runner.go:130] >       "pinned": true
	I0916 10:48:02.594899 2088165 command_runner.go:130] >     }
	I0916 10:48:02.594903 2088165 command_runner.go:130] >   ]
	I0916 10:48:02.594908 2088165 command_runner.go:130] > }
	I0916 10:48:02.595084 2088165 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:48:02.595096 2088165 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:48:02.595159 2088165 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:02.632598 2088165 command_runner.go:130] > {
	I0916 10:48:02.632619 2088165 command_runner.go:130] >   "images": [
	I0916 10:48:02.632625 2088165 command_runner.go:130] >     {
	I0916 10:48:02.632635 2088165 command_runner.go:130] >       "id": "sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 10:48:02.632640 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.632649 2088165 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 10:48:02.632653 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632657 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.632666 2088165 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 10:48:02.632669 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632673 2088165 command_runner.go:130] >       "size": "33309097",
	I0916 10:48:02.632677 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.632681 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.632684 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.632688 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.632692 2088165 command_runner.go:130] >     },
	I0916 10:48:02.632695 2088165 command_runner.go:130] >     {
	I0916 10:48:02.632712 2088165 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 10:48:02.632722 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.632765 2088165 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 10:48:02.632780 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632784 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.632798 2088165 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 10:48:02.632802 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632806 2088165 command_runner.go:130] >       "size": "8034419",
	I0916 10:48:02.632809 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.632813 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.632817 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.632820 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.632824 2088165 command_runner.go:130] >     },
	I0916 10:48:02.632827 2088165 command_runner.go:130] >     {
	I0916 10:48:02.632834 2088165 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 10:48:02.632837 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.632844 2088165 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 10:48:02.632848 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632851 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.632871 2088165 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 10:48:02.632881 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632887 2088165 command_runner.go:130] >       "size": "16948420",
	I0916 10:48:02.632891 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.632895 2088165 command_runner.go:130] >       "username": "nonroot",
	I0916 10:48:02.632898 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.632902 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.632905 2088165 command_runner.go:130] >     },
	I0916 10:48:02.632909 2088165 command_runner.go:130] >     {
	I0916 10:48:02.632915 2088165 command_runner.go:130] >       "id": "sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 10:48:02.632919 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.632923 2088165 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 10:48:02.632930 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632934 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.632942 2088165 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a"
	I0916 10:48:02.632945 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.632949 2088165 command_runner.go:130] >       "size": "66535646",
	I0916 10:48:02.632952 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.632956 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.632960 2088165 command_runner.go:130] >       },
	I0916 10:48:02.632964 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.632975 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.632982 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.632985 2088165 command_runner.go:130] >     },
	I0916 10:48:02.632988 2088165 command_runner.go:130] >     {
	I0916 10:48:02.632995 2088165 command_runner.go:130] >       "id": "sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 10:48:02.632998 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.633003 2088165 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 10:48:02.633006 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633010 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.633020 2088165 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb"
	I0916 10:48:02.633030 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633034 2088165 command_runner.go:130] >       "size": "25687130",
	I0916 10:48:02.633037 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.633041 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.633044 2088165 command_runner.go:130] >       },
	I0916 10:48:02.633048 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.633052 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.633058 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.633061 2088165 command_runner.go:130] >     },
	I0916 10:48:02.633064 2088165 command_runner.go:130] >     {
	I0916 10:48:02.633071 2088165 command_runner.go:130] >       "id": "sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 10:48:02.633074 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.633080 2088165 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 10:48:02.633083 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633086 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.633094 2088165 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1"
	I0916 10:48:02.633098 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633102 2088165 command_runner.go:130] >       "size": "23948670",
	I0916 10:48:02.633105 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.633108 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.633112 2088165 command_runner.go:130] >       },
	I0916 10:48:02.633116 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.633120 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.633123 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.633126 2088165 command_runner.go:130] >     },
	I0916 10:48:02.633129 2088165 command_runner.go:130] >     {
	I0916 10:48:02.633135 2088165 command_runner.go:130] >       "id": "sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 10:48:02.633139 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.633144 2088165 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 10:48:02.633148 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633152 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.633160 2088165 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44"
	I0916 10:48:02.633163 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633167 2088165 command_runner.go:130] >       "size": "26756812",
	I0916 10:48:02.633170 2088165 command_runner.go:130] >       "uid": null,
	I0916 10:48:02.633174 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.633177 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.633181 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.633184 2088165 command_runner.go:130] >     },
	I0916 10:48:02.633186 2088165 command_runner.go:130] >     {
	I0916 10:48:02.633193 2088165 command_runner.go:130] >       "id": "sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 10:48:02.633196 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.633201 2088165 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 10:48:02.633204 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633208 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.633222 2088165 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 10:48:02.633225 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633229 2088165 command_runner.go:130] >       "size": "18507674",
	I0916 10:48:02.633232 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.633236 2088165 command_runner.go:130] >         "value": "0"
	I0916 10:48:02.633239 2088165 command_runner.go:130] >       },
	I0916 10:48:02.633242 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.633246 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.633249 2088165 command_runner.go:130] >       "pinned": false
	I0916 10:48:02.633253 2088165 command_runner.go:130] >     },
	I0916 10:48:02.633256 2088165 command_runner.go:130] >     {
	I0916 10:48:02.633263 2088165 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 10:48:02.633266 2088165 command_runner.go:130] >       "repoTags": [
	I0916 10:48:02.633272 2088165 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 10:48:02.633275 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633279 2088165 command_runner.go:130] >       "repoDigests": [
	I0916 10:48:02.633286 2088165 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 10:48:02.633289 2088165 command_runner.go:130] >       ],
	I0916 10:48:02.633293 2088165 command_runner.go:130] >       "size": "267933",
	I0916 10:48:02.633296 2088165 command_runner.go:130] >       "uid": {
	I0916 10:48:02.633300 2088165 command_runner.go:130] >         "value": "65535"
	I0916 10:48:02.633303 2088165 command_runner.go:130] >       },
	I0916 10:48:02.633308 2088165 command_runner.go:130] >       "username": "",
	I0916 10:48:02.633311 2088165 command_runner.go:130] >       "spec": null,
	I0916 10:48:02.633315 2088165 command_runner.go:130] >       "pinned": true
	I0916 10:48:02.633317 2088165 command_runner.go:130] >     }
	I0916 10:48:02.633320 2088165 command_runner.go:130] >   ]
	I0916 10:48:02.633323 2088165 command_runner.go:130] > }
	I0916 10:48:02.635227 2088165 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:48:02.635252 2088165 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:48:02.635266 2088165 kubeadm.go:934] updating node { 192.168.49.2 8441 v1.31.1 containerd true true} ...
	I0916 10:48:02.635414 2088165 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=functional-911502 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:48:02.635484 2088165 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:48:02.669886 2088165 command_runner.go:130] > {
	I0916 10:48:02.669909 2088165 command_runner.go:130] >   "status": {
	I0916 10:48:02.669915 2088165 command_runner.go:130] >     "conditions": [
	I0916 10:48:02.669919 2088165 command_runner.go:130] >       {
	I0916 10:48:02.669925 2088165 command_runner.go:130] >         "type": "RuntimeReady",
	I0916 10:48:02.669929 2088165 command_runner.go:130] >         "status": true,
	I0916 10:48:02.669935 2088165 command_runner.go:130] >         "reason": "",
	I0916 10:48:02.669939 2088165 command_runner.go:130] >         "message": ""
	I0916 10:48:02.669943 2088165 command_runner.go:130] >       },
	I0916 10:48:02.669949 2088165 command_runner.go:130] >       {
	I0916 10:48:02.669956 2088165 command_runner.go:130] >         "type": "NetworkReady",
	I0916 10:48:02.669964 2088165 command_runner.go:130] >         "status": true,
	I0916 10:48:02.669968 2088165 command_runner.go:130] >         "reason": "",
	I0916 10:48:02.669974 2088165 command_runner.go:130] >         "message": ""
	I0916 10:48:02.669978 2088165 command_runner.go:130] >       },
	I0916 10:48:02.669981 2088165 command_runner.go:130] >       {
	I0916 10:48:02.669987 2088165 command_runner.go:130] >         "type": "ContainerdHasNoDeprecationWarnings",
	I0916 10:48:02.669993 2088165 command_runner.go:130] >         "status": true,
	I0916 10:48:02.669997 2088165 command_runner.go:130] >         "reason": "",
	I0916 10:48:02.670001 2088165 command_runner.go:130] >         "message": ""
	I0916 10:48:02.670007 2088165 command_runner.go:130] >       }
	I0916 10:48:02.670010 2088165 command_runner.go:130] >     ]
	I0916 10:48:02.670013 2088165 command_runner.go:130] >   },
	I0916 10:48:02.670017 2088165 command_runner.go:130] >   "cniconfig": {
	I0916 10:48:02.670021 2088165 command_runner.go:130] >     "PluginDirs": [
	I0916 10:48:02.670025 2088165 command_runner.go:130] >       "/opt/cni/bin"
	I0916 10:48:02.670030 2088165 command_runner.go:130] >     ],
	I0916 10:48:02.670036 2088165 command_runner.go:130] >     "PluginConfDir": "/etc/cni/net.d",
	I0916 10:48:02.670040 2088165 command_runner.go:130] >     "PluginMaxConfNum": 1,
	I0916 10:48:02.670046 2088165 command_runner.go:130] >     "Prefix": "eth",
	I0916 10:48:02.670054 2088165 command_runner.go:130] >     "Networks": [
	I0916 10:48:02.670060 2088165 command_runner.go:130] >       {
	I0916 10:48:02.670064 2088165 command_runner.go:130] >         "Config": {
	I0916 10:48:02.670070 2088165 command_runner.go:130] >           "Name": "cni-loopback",
	I0916 10:48:02.670074 2088165 command_runner.go:130] >           "CNIVersion": "0.3.1",
	I0916 10:48:02.670080 2088165 command_runner.go:130] >           "Plugins": [
	I0916 10:48:02.670084 2088165 command_runner.go:130] >             {
	I0916 10:48:02.670097 2088165 command_runner.go:130] >               "Network": {
	I0916 10:48:02.670102 2088165 command_runner.go:130] >                 "type": "loopback",
	I0916 10:48:02.670105 2088165 command_runner.go:130] >                 "ipam": {},
	I0916 10:48:02.670109 2088165 command_runner.go:130] >                 "dns": {}
	I0916 10:48:02.670113 2088165 command_runner.go:130] >               },
	I0916 10:48:02.670121 2088165 command_runner.go:130] >               "Source": "{\"type\":\"loopback\"}"
	I0916 10:48:02.670124 2088165 command_runner.go:130] >             }
	I0916 10:48:02.670128 2088165 command_runner.go:130] >           ],
	I0916 10:48:02.670138 2088165 command_runner.go:130] >           "Source": "{\n\"cniVersion\": \"0.3.1\",\n\"name\": \"cni-loopback\",\n\"plugins\": [{\n  \"type\": \"loopback\"\n}]\n}"
	I0916 10:48:02.670144 2088165 command_runner.go:130] >         },
	I0916 10:48:02.670148 2088165 command_runner.go:130] >         "IFName": "lo"
	I0916 10:48:02.670151 2088165 command_runner.go:130] >       },
	I0916 10:48:02.670154 2088165 command_runner.go:130] >       {
	I0916 10:48:02.670159 2088165 command_runner.go:130] >         "Config": {
	I0916 10:48:02.670164 2088165 command_runner.go:130] >           "Name": "kindnet",
	I0916 10:48:02.670170 2088165 command_runner.go:130] >           "CNIVersion": "0.3.1",
	I0916 10:48:02.670176 2088165 command_runner.go:130] >           "Plugins": [
	I0916 10:48:02.670179 2088165 command_runner.go:130] >             {
	I0916 10:48:02.670183 2088165 command_runner.go:130] >               "Network": {
	I0916 10:48:02.670187 2088165 command_runner.go:130] >                 "type": "ptp",
	I0916 10:48:02.670191 2088165 command_runner.go:130] >                 "ipam": {
	I0916 10:48:02.670196 2088165 command_runner.go:130] >                   "type": "host-local"
	I0916 10:48:02.670202 2088165 command_runner.go:130] >                 },
	I0916 10:48:02.670227 2088165 command_runner.go:130] >                 "dns": {}
	I0916 10:48:02.670235 2088165 command_runner.go:130] >               },
	I0916 10:48:02.670252 2088165 command_runner.go:130] >               "Source": "{\"ipMasq\":false,\"ipam\":{\"dataDir\":\"/run/cni-ipam-state\",\"ranges\":[[{\"subnet\":\"10.244.0.0/24\"}]],\"routes\":[{\"dst\":\"0.0.0.0/0\"}],\"type\":\"host-local\"},\"mtu\":1500,\"type\":\"ptp\"}"
	I0916 10:48:02.670260 2088165 command_runner.go:130] >             },
	I0916 10:48:02.670265 2088165 command_runner.go:130] >             {
	I0916 10:48:02.670269 2088165 command_runner.go:130] >               "Network": {
	I0916 10:48:02.670274 2088165 command_runner.go:130] >                 "type": "portmap",
	I0916 10:48:02.670280 2088165 command_runner.go:130] >                 "capabilities": {
	I0916 10:48:02.670284 2088165 command_runner.go:130] >                   "portMappings": true
	I0916 10:48:02.670288 2088165 command_runner.go:130] >                 },
	I0916 10:48:02.670291 2088165 command_runner.go:130] >                 "ipam": {},
	I0916 10:48:02.670295 2088165 command_runner.go:130] >                 "dns": {}
	I0916 10:48:02.670301 2088165 command_runner.go:130] >               },
	I0916 10:48:02.670308 2088165 command_runner.go:130] >               "Source": "{\"capabilities\":{\"portMappings\":true},\"type\":\"portmap\"}"
	I0916 10:48:02.670314 2088165 command_runner.go:130] >             }
	I0916 10:48:02.670318 2088165 command_runner.go:130] >           ],
	I0916 10:48:02.670350 2088165 command_runner.go:130] >           "Source": "\n{\n\t\"cniVersion\": \"0.3.1\",\n\t\"name\": \"kindnet\",\n\t\"plugins\": [\n\t{\n\t\t\"type\": \"ptp\",\n\t\t\"ipMasq\": false,\n\t\t\"ipam\": {\n\t\t\t\"type\": \"host-local\",\n\t\t\t\"dataDir\": \"/run/cni-ipam-state\",\n\t\t\t\"routes\": [\n\t\t\t\t\n\t\t\t\t\n\t\t\t\t{ \"dst\": \"0.0.0.0/0\" }\n\t\t\t],\n\t\t\t\"ranges\": [\n\t\t\t\t\n\t\t\t\t\n\t\t\t\t[ { \"subnet\": \"10.244.0.0/24\" } ]\n\t\t\t]\n\t\t}\n\t\t,\n\t\t\"mtu\": 1500\n\t\t\n\t},\n\t{\n\t\t\"type\": \"portmap\",\n\t\t\"capabilities\": {\n\t\t\t\"portMappings\": true\n\t\t}\n\t}\n\t]\n}\n"
	I0916 10:48:02.670358 2088165 command_runner.go:130] >         },
	I0916 10:48:02.670362 2088165 command_runner.go:130] >         "IFName": "eth0"
	I0916 10:48:02.670365 2088165 command_runner.go:130] >       }
	I0916 10:48:02.670368 2088165 command_runner.go:130] >     ]
	I0916 10:48:02.670371 2088165 command_runner.go:130] >   },
	I0916 10:48:02.670375 2088165 command_runner.go:130] >   "config": {
	I0916 10:48:02.670380 2088165 command_runner.go:130] >     "containerd": {
	I0916 10:48:02.670386 2088165 command_runner.go:130] >       "snapshotter": "overlayfs",
	I0916 10:48:02.670391 2088165 command_runner.go:130] >       "defaultRuntimeName": "runc",
	I0916 10:48:02.670396 2088165 command_runner.go:130] >       "defaultRuntime": {
	I0916 10:48:02.670400 2088165 command_runner.go:130] >         "runtimeType": "",
	I0916 10:48:02.670404 2088165 command_runner.go:130] >         "runtimePath": "",
	I0916 10:48:02.670416 2088165 command_runner.go:130] >         "runtimeEngine": "",
	I0916 10:48:02.670420 2088165 command_runner.go:130] >         "PodAnnotations": null,
	I0916 10:48:02.670424 2088165 command_runner.go:130] >         "ContainerAnnotations": null,
	I0916 10:48:02.670428 2088165 command_runner.go:130] >         "runtimeRoot": "",
	I0916 10:48:02.670433 2088165 command_runner.go:130] >         "options": null,
	I0916 10:48:02.670441 2088165 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0916 10:48:02.670447 2088165 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0916 10:48:02.670455 2088165 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0916 10:48:02.670459 2088165 command_runner.go:130] >         "cniConfDir": "",
	I0916 10:48:02.670463 2088165 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0916 10:48:02.670469 2088165 command_runner.go:130] >         "snapshotter": "",
	I0916 10:48:02.670473 2088165 command_runner.go:130] >         "sandboxMode": ""
	I0916 10:48:02.670478 2088165 command_runner.go:130] >       },
	I0916 10:48:02.670485 2088165 command_runner.go:130] >       "untrustedWorkloadRuntime": {
	I0916 10:48:02.670499 2088165 command_runner.go:130] >         "runtimeType": "",
	I0916 10:48:02.670503 2088165 command_runner.go:130] >         "runtimePath": "",
	I0916 10:48:02.670507 2088165 command_runner.go:130] >         "runtimeEngine": "",
	I0916 10:48:02.670512 2088165 command_runner.go:130] >         "PodAnnotations": null,
	I0916 10:48:02.670522 2088165 command_runner.go:130] >         "ContainerAnnotations": null,
	I0916 10:48:02.670526 2088165 command_runner.go:130] >         "runtimeRoot": "",
	I0916 10:48:02.670530 2088165 command_runner.go:130] >         "options": null,
	I0916 10:48:02.670535 2088165 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0916 10:48:02.670542 2088165 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0916 10:48:02.670546 2088165 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0916 10:48:02.670551 2088165 command_runner.go:130] >         "cniConfDir": "",
	I0916 10:48:02.670556 2088165 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0916 10:48:02.670560 2088165 command_runner.go:130] >         "snapshotter": "",
	I0916 10:48:02.670566 2088165 command_runner.go:130] >         "sandboxMode": ""
	I0916 10:48:02.670569 2088165 command_runner.go:130] >       },
	I0916 10:48:02.670573 2088165 command_runner.go:130] >       "runtimes": {
	I0916 10:48:02.670579 2088165 command_runner.go:130] >         "runc": {
	I0916 10:48:02.670584 2088165 command_runner.go:130] >           "runtimeType": "io.containerd.runc.v2",
	I0916 10:48:02.670596 2088165 command_runner.go:130] >           "runtimePath": "",
	I0916 10:48:02.670601 2088165 command_runner.go:130] >           "runtimeEngine": "",
	I0916 10:48:02.670606 2088165 command_runner.go:130] >           "PodAnnotations": null,
	I0916 10:48:02.670610 2088165 command_runner.go:130] >           "ContainerAnnotations": null,
	I0916 10:48:02.670615 2088165 command_runner.go:130] >           "runtimeRoot": "",
	I0916 10:48:02.670621 2088165 command_runner.go:130] >           "options": {
	I0916 10:48:02.670626 2088165 command_runner.go:130] >             "SystemdCgroup": false
	I0916 10:48:02.670629 2088165 command_runner.go:130] >           },
	I0916 10:48:02.670642 2088165 command_runner.go:130] >           "privileged_without_host_devices": false,
	I0916 10:48:02.670651 2088165 command_runner.go:130] >           "privileged_without_host_devices_all_devices_allowed": false,
	I0916 10:48:02.670655 2088165 command_runner.go:130] >           "baseRuntimeSpec": "",
	I0916 10:48:02.670659 2088165 command_runner.go:130] >           "cniConfDir": "",
	I0916 10:48:02.670663 2088165 command_runner.go:130] >           "cniMaxConfNum": 0,
	I0916 10:48:02.670699 2088165 command_runner.go:130] >           "snapshotter": "",
	I0916 10:48:02.670704 2088165 command_runner.go:130] >           "sandboxMode": "podsandbox"
	I0916 10:48:02.670707 2088165 command_runner.go:130] >         }
	I0916 10:48:02.670710 2088165 command_runner.go:130] >       },
	I0916 10:48:02.670716 2088165 command_runner.go:130] >       "noPivot": false,
	I0916 10:48:02.670720 2088165 command_runner.go:130] >       "disableSnapshotAnnotations": true,
	I0916 10:48:02.670724 2088165 command_runner.go:130] >       "discardUnpackedLayers": true,
	I0916 10:48:02.670729 2088165 command_runner.go:130] >       "ignoreBlockIONotEnabledErrors": false,
	I0916 10:48:02.670733 2088165 command_runner.go:130] >       "ignoreRdtNotEnabledErrors": false
	I0916 10:48:02.670736 2088165 command_runner.go:130] >     },
	I0916 10:48:02.670739 2088165 command_runner.go:130] >     "cni": {
	I0916 10:48:02.670743 2088165 command_runner.go:130] >       "binDir": "/opt/cni/bin",
	I0916 10:48:02.670748 2088165 command_runner.go:130] >       "confDir": "/etc/cni/net.d",
	I0916 10:48:02.670754 2088165 command_runner.go:130] >       "maxConfNum": 1,
	I0916 10:48:02.670759 2088165 command_runner.go:130] >       "setupSerially": false,
	I0916 10:48:02.670767 2088165 command_runner.go:130] >       "confTemplate": "",
	I0916 10:48:02.670771 2088165 command_runner.go:130] >       "ipPref": ""
	I0916 10:48:02.670774 2088165 command_runner.go:130] >     },
	I0916 10:48:02.670778 2088165 command_runner.go:130] >     "registry": {
	I0916 10:48:02.670783 2088165 command_runner.go:130] >       "configPath": "/etc/containerd/certs.d",
	I0916 10:48:02.670795 2088165 command_runner.go:130] >       "mirrors": null,
	I0916 10:48:02.670799 2088165 command_runner.go:130] >       "configs": null,
	I0916 10:48:02.670803 2088165 command_runner.go:130] >       "auths": null,
	I0916 10:48:02.670807 2088165 command_runner.go:130] >       "headers": null
	I0916 10:48:02.670810 2088165 command_runner.go:130] >     },
	I0916 10:48:02.670814 2088165 command_runner.go:130] >     "imageDecryption": {
	I0916 10:48:02.670818 2088165 command_runner.go:130] >       "keyModel": "node"
	I0916 10:48:02.670823 2088165 command_runner.go:130] >     },
	I0916 10:48:02.670829 2088165 command_runner.go:130] >     "disableTCPService": true,
	I0916 10:48:02.670833 2088165 command_runner.go:130] >     "streamServerAddress": "",
	I0916 10:48:02.670839 2088165 command_runner.go:130] >     "streamServerPort": "10010",
	I0916 10:48:02.670844 2088165 command_runner.go:130] >     "streamIdleTimeout": "4h0m0s",
	I0916 10:48:02.670850 2088165 command_runner.go:130] >     "enableSelinux": false,
	I0916 10:48:02.670854 2088165 command_runner.go:130] >     "selinuxCategoryRange": 1024,
	I0916 10:48:02.670862 2088165 command_runner.go:130] >     "sandboxImage": "registry.k8s.io/pause:3.10",
	I0916 10:48:02.670867 2088165 command_runner.go:130] >     "statsCollectPeriod": 10,
	I0916 10:48:02.670874 2088165 command_runner.go:130] >     "systemdCgroup": false,
	I0916 10:48:02.670878 2088165 command_runner.go:130] >     "enableTLSStreaming": false,
	I0916 10:48:02.670882 2088165 command_runner.go:130] >     "x509KeyPairStreaming": {
	I0916 10:48:02.670886 2088165 command_runner.go:130] >       "tlsCertFile": "",
	I0916 10:48:02.670889 2088165 command_runner.go:130] >       "tlsKeyFile": ""
	I0916 10:48:02.670893 2088165 command_runner.go:130] >     },
	I0916 10:48:02.670898 2088165 command_runner.go:130] >     "maxContainerLogSize": 16384,
	I0916 10:48:02.670902 2088165 command_runner.go:130] >     "disableCgroup": false,
	I0916 10:48:02.670906 2088165 command_runner.go:130] >     "disableApparmor": false,
	I0916 10:48:02.670913 2088165 command_runner.go:130] >     "restrictOOMScoreAdj": false,
	I0916 10:48:02.670917 2088165 command_runner.go:130] >     "maxConcurrentDownloads": 3,
	I0916 10:48:02.670922 2088165 command_runner.go:130] >     "disableProcMount": false,
	I0916 10:48:02.670928 2088165 command_runner.go:130] >     "unsetSeccompProfile": "",
	I0916 10:48:02.670933 2088165 command_runner.go:130] >     "tolerateMissingHugetlbController": true,
	I0916 10:48:02.670939 2088165 command_runner.go:130] >     "disableHugetlbController": true,
	I0916 10:48:02.670945 2088165 command_runner.go:130] >     "device_ownership_from_security_context": false,
	I0916 10:48:02.670957 2088165 command_runner.go:130] >     "ignoreImageDefinedVolumes": false,
	I0916 10:48:02.670962 2088165 command_runner.go:130] >     "netnsMountsUnderStateDir": false,
	I0916 10:48:02.670966 2088165 command_runner.go:130] >     "enableUnprivilegedPorts": true,
	I0916 10:48:02.670971 2088165 command_runner.go:130] >     "enableUnprivilegedICMP": false,
	I0916 10:48:02.670980 2088165 command_runner.go:130] >     "enableCDI": false,
	I0916 10:48:02.670984 2088165 command_runner.go:130] >     "cdiSpecDirs": [
	I0916 10:48:02.670988 2088165 command_runner.go:130] >       "/etc/cdi",
	I0916 10:48:02.670991 2088165 command_runner.go:130] >       "/var/run/cdi"
	I0916 10:48:02.670994 2088165 command_runner.go:130] >     ],
	I0916 10:48:02.671000 2088165 command_runner.go:130] >     "imagePullProgressTimeout": "5m0s",
	I0916 10:48:02.671007 2088165 command_runner.go:130] >     "drainExecSyncIOTimeout": "0s",
	I0916 10:48:02.671011 2088165 command_runner.go:130] >     "imagePullWithSyncFs": false,
	I0916 10:48:02.671020 2088165 command_runner.go:130] >     "ignoreDeprecationWarnings": null,
	I0916 10:48:02.671026 2088165 command_runner.go:130] >     "containerdRootDir": "/var/lib/containerd",
	I0916 10:48:02.671034 2088165 command_runner.go:130] >     "containerdEndpoint": "/run/containerd/containerd.sock",
	I0916 10:48:02.671039 2088165 command_runner.go:130] >     "rootDir": "/var/lib/containerd/io.containerd.grpc.v1.cri",
	I0916 10:48:02.671045 2088165 command_runner.go:130] >     "stateDir": "/run/containerd/io.containerd.grpc.v1.cri"
	I0916 10:48:02.671048 2088165 command_runner.go:130] >   },
	I0916 10:48:02.671054 2088165 command_runner.go:130] >   "golang": "go1.22.7",
	I0916 10:48:02.671058 2088165 command_runner.go:130] >   "lastCNILoadStatus": "OK",
	I0916 10:48:02.671065 2088165 command_runner.go:130] >   "lastCNILoadStatus.default": "OK"
	I0916 10:48:02.671068 2088165 command_runner.go:130] > }
	I0916 10:48:02.674112 2088165 cni.go:84] Creating CNI manager for ""
	I0916 10:48:02.674135 2088165 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:48:02.674144 2088165 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:48:02.674166 2088165 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8441 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:functional-911502 NodeName:functional-911502 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodP
ath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:48:02.674302 2088165 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8441
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "functional-911502"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8441
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:48:02.674377 2088165 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:48:02.682588 2088165 command_runner.go:130] > kubeadm
	I0916 10:48:02.682607 2088165 command_runner.go:130] > kubectl
	I0916 10:48:02.682612 2088165 command_runner.go:130] > kubelet
	I0916 10:48:02.683858 2088165 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:48:02.683975 2088165 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:48:02.692859 2088165 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (321 bytes)
	I0916 10:48:02.711631 2088165 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:48:02.731842 2088165 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2171 bytes)
	I0916 10:48:02.750860 2088165 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:48:02.754427 2088165 command_runner.go:130] > 192.168.49.2	control-plane.minikube.internal
	I0916 10:48:02.754604 2088165 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:02.864578 2088165 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:02.876577 2088165 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502 for IP: 192.168.49.2
	I0916 10:48:02.876603 2088165 certs.go:194] generating shared ca certs ...
	I0916 10:48:02.876619 2088165 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:02.876757 2088165 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:48:02.876812 2088165 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:48:02.876822 2088165 certs.go:256] generating profile certs ...
	I0916 10:48:02.876912 2088165 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.key
	I0916 10:48:02.877016 2088165 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.key.03a9d60c
	I0916 10:48:02.877064 2088165 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.key
	I0916 10:48:02.877076 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:48:02.877089 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:48:02.877101 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:48:02.877116 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:48:02.877126 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:48:02.877147 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:48:02.877162 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:48:02.877172 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:48:02.877223 2088165 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:48:02.877260 2088165 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:48:02.877272 2088165 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:48:02.877297 2088165 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:48:02.877326 2088165 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:48:02.877351 2088165 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:48:02.877396 2088165 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:48:02.877426 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:48:02.877443 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:48:02.877455 2088165 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:02.878009 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:48:02.905336 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:48:02.933262 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:48:02.958253 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:48:02.983055 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:48:03.010360 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:48:03.040350 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:48:03.067337 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:48:03.102805 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:48:03.135061 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:48:03.160952 2088165 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:48:03.185526 2088165 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:48:03.203896 2088165 ssh_runner.go:195] Run: openssl version
	I0916 10:48:03.209344 2088165 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 10:48:03.209746 2088165 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:48:03.219896 2088165 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:03.223424 2088165 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:03.223501 2088165 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:03.223587 2088165 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:03.230151 2088165 command_runner.go:130] > b5213941
	I0916 10:48:03.230559 2088165 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:48:03.239662 2088165 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:48:03.249181 2088165 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:48:03.252963 2088165 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:48:03.252995 2088165 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:48:03.253074 2088165 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:48:03.260600 2088165 command_runner.go:130] > 51391683
	I0916 10:48:03.260681 2088165 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:48:03.269973 2088165 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:48:03.279717 2088165 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:48:03.283489 2088165 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:48:03.283523 2088165 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:48:03.283579 2088165 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:48:03.290333 2088165 command_runner.go:130] > 3ec20f2e
	I0916 10:48:03.290803 2088165 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:48:03.301183 2088165 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:48:03.304780 2088165 command_runner.go:130] >   File: /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:48:03.304810 2088165 command_runner.go:130] >   Size: 1176      	Blocks: 8          IO Block: 4096   regular file
	I0916 10:48:03.304817 2088165 command_runner.go:130] > Device: 10301h/66305d	Inode: 1081533     Links: 1
	I0916 10:48:03.304824 2088165 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 10:48:03.304830 2088165 command_runner.go:130] > Access: 2024-09-16 10:47:21.714942040 +0000
	I0916 10:48:03.304843 2088165 command_runner.go:130] > Modify: 2024-09-16 10:47:21.714942040 +0000
	I0916 10:48:03.304853 2088165 command_runner.go:130] > Change: 2024-09-16 10:47:21.714942040 +0000
	I0916 10:48:03.304858 2088165 command_runner.go:130] >  Birth: 2024-09-16 10:47:21.714942040 +0000
	I0916 10:48:03.304925 2088165 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:48:03.311486 2088165 command_runner.go:130] > Certificate will not expire
	I0916 10:48:03.311890 2088165 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:48:03.318544 2088165 command_runner.go:130] > Certificate will not expire
	I0916 10:48:03.319055 2088165 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:48:03.325922 2088165 command_runner.go:130] > Certificate will not expire
	I0916 10:48:03.326327 2088165 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:48:03.333137 2088165 command_runner.go:130] > Certificate will not expire
	I0916 10:48:03.333544 2088165 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:48:03.339930 2088165 command_runner.go:130] > Certificate will not expire
	I0916 10:48:03.340335 2088165 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:48:03.346964 2088165 command_runner.go:130] > Certificate will not expire
	I0916 10:48:03.347404 2088165 kubeadm.go:392] StartCluster: {Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APISer
verNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirr
or: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:03.347486 2088165 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:48:03.347545 2088165 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:48:03.389466 2088165 command_runner.go:130] > 11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950
	I0916 10:48:03.389521 2088165 command_runner.go:130] > 8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27
	I0916 10:48:03.389673 2088165 command_runner.go:130] > ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b
	I0916 10:48:03.389808 2088165 command_runner.go:130] > 57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6
	I0916 10:48:03.389919 2088165 command_runner.go:130] > a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19
	I0916 10:48:03.390056 2088165 command_runner.go:130] > 492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31
	I0916 10:48:03.390155 2088165 command_runner.go:130] > 31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44
	I0916 10:48:03.390307 2088165 command_runner.go:130] > 928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a
	I0916 10:48:03.393134 2088165 cri.go:89] found id: "11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950"
	I0916 10:48:03.393154 2088165 cri.go:89] found id: "8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27"
	I0916 10:48:03.393158 2088165 cri.go:89] found id: "ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b"
	I0916 10:48:03.393164 2088165 cri.go:89] found id: "57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6"
	I0916 10:48:03.393168 2088165 cri.go:89] found id: "a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	I0916 10:48:03.393171 2088165 cri.go:89] found id: "492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31"
	I0916 10:48:03.393175 2088165 cri.go:89] found id: "31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44"
	I0916 10:48:03.393178 2088165 cri.go:89] found id: "928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a"
	I0916 10:48:03.393182 2088165 cri.go:89] found id: ""
	I0916 10:48:03.393236 2088165 ssh_runner.go:195] Run: sudo runc --root /run/containerd/runc/k8s.io list -f json
	I0916 10:48:03.422511 2088165 command_runner.go:130] > [{"ociVersion":"1.0.2-dev","id":"11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","pid":2200,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950/rootfs","created":"2024-09-16T10:47:56.240222491Z","annotations":{"io.kubernetes.cri.container-name":"coredns","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/coredns/coredns:v1.11.3","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"31265291ac7da
492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44","pid":1422,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44/rootfs","created":"2024-09-16T10:47:32.746424926Z","annotations":{"io.kubernetes.cri.container-name":"kube-scheduler","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-scheduler:v1.31.1","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","pid":1960,"status":"running","bundle":"/run/co
ntainerd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c/rootfs","created":"2024-09-16T10:47:45.184280835Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_storage-provisioner_ecac562d-8318-4226-b5f1-61f2c76bb51b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"492408bc37d38a1d8712ef754136e8b589841b3096dee7a1f
a2e6f6b99ce6c31","pid":1486,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31/rootfs","created":"2024-09-16T10:47:32.919386537Z","annotations":{"io.kubernetes.cri.container-name":"etcd","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/etcd:3.5.15-0","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","pid":1315,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e97
13de051f556893ed4577eb8b5d9d38835da8d64517b7a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a/rootfs","created":"2024-09-16T10:47:32.575723929Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"256","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-apiserver-functional-911502_846e81f7bcac6804cf5ef499ea5ac265","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","pid":1316,"status":"running
","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f/rootfs","created":"2024-09-16T10:47:32.562785558Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"204","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-controller-manager-functional-911502_26c4a2e985a1c721e0411e5d9497a35b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev
","id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","pid":1338,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7/rootfs","created":"2024-09-16T10:47:32.61203341Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-scheduler-functional-911502_69dae9ff35c780f43a15f539d6f19e46","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbo
x-uid":"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","pid":1792,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6/rootfs","created":"2024-09-16T10:47:44.701696235Z","annotations":{"io.kubernetes.cri.container-name":"kube-proxy","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-proxy:v1.31.1","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"8aede76947864ca07593bc24b939a
29faf7bb7dd85244f30f18f232f3ec1ea27","pid":2092,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27/rootfs","created":"2024-09-16T10:47:45.396938875Z","annotations":{"io.kubernetes.cri.container-name":"storage-provisioner","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"gcr.io/k8s-minikube/storage-provisioner:v5","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","pid":1399,"status":"running","bundle":"/run/containerd/io.cont
ainerd.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a/rootfs","created":"2024-09-16T10:47:32.722647206Z","annotations":{"io.kubernetes.cri.container-name":"kube-controller-manager","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-controller-manager:v1.31.1","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","pid":2166,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b
890b3ee6fc0070983ddfd","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd/rootfs","created":"2024-09-16T10:47:56.156755788Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_coredns-7c65d6cfc9-6kw9d_072167c7-fa1a-463e-a957-91ea24020387","io.kubernetes.cri.sandbox-memory":"178257920","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","pid":1479,"status":"running","bundle":"/run/contain
erd/io.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19/rootfs","created":"2024-09-16T10:47:32.854949973Z","annotations":{"io.kubernetes.cri.container-name":"kube-apiserver","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-apiserver:v1.31.1","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","pid":1768,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891
ab78e","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e/rootfs","created":"2024-09-16T10:47:44.62156803Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"10000","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kindnet-7r2rg_ab52a601-e0fe-4f60-a202-477487da9bb2","io.kubernetes.cri.sandbox-memory":"52428800","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","pid":1733,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8
s.io/c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1/rootfs","created":"2024-09-16T10:47:44.525242585Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-proxy-l59dx_72a26843-9f97-4121-91f0-3cb389048315","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","pid":1835,"status":"running",
"bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b/rootfs","created":"2024-09-16T10:47:44.827883907Z","annotations":{"io.kubernetes.cri.container-name":"kindnet-cni","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"docker.io/kindest/kindnetd:v20240813-c6f155d6","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","pid":1295,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d4
8ffedbe08802f86e","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e/rootfs","created":"2024-09-16T10:47:32.534113117Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_etcd-functional-911502_c7ab8017ca620f2ba7e026f4cdb427a2","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"}]
	I0916 10:48:03.424698 2088165 cri.go:116] JSON = [{"ociVersion":"1.0.2-dev","id":"11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","pid":2200,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950/rootfs","created":"2024-09-16T10:47:56.240222491Z","annotations":{"io.kubernetes.cri.container-name":"coredns","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/coredns/coredns:v1.11.3","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"31265291ac7da492c3c
fad84540ba2b684cdf0abad82be5c56d392df7613dc44","pid":1422,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44/rootfs","created":"2024-09-16T10:47:32.746424926Z","annotations":{"io.kubernetes.cri.container-name":"kube-scheduler","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-scheduler:v1.31.1","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","pid":1960,"status":"running","bundle":"/run/containe
rd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c/rootfs","created":"2024-09-16T10:47:45.184280835Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_storage-provisioner_ecac562d-8318-4226-b5f1-61f2c76bb51b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6
b99ce6c31","pid":1486,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31/rootfs","created":"2024-09-16T10:47:32.919386537Z","annotations":{"io.kubernetes.cri.container-name":"etcd","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/etcd:3.5.15-0","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","pid":1315,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e9713de05
1f556893ed4577eb8b5d9d38835da8d64517b7a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a/rootfs","created":"2024-09-16T10:47:32.575723929Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"256","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-apiserver-functional-911502_846e81f7bcac6804cf5ef499ea5ac265","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","pid":1316,"status":"running","bun
dle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f/rootfs","created":"2024-09-16T10:47:32.562785558Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"204","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-controller-manager-functional-911502_26c4a2e985a1c721e0411e5d9497a35b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id"
:"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","pid":1338,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7/rootfs","created":"2024-09-16T10:47:32.61203341Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-scheduler-functional-911502_69dae9ff35c780f43a15f539d6f19e46","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid"
:"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","pid":1792,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6/rootfs","created":"2024-09-16T10:47:44.701696235Z","annotations":{"io.kubernetes.cri.container-name":"kube-proxy","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-proxy:v1.31.1","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"8aede76947864ca07593bc24b939a29faf7
bb7dd85244f30f18f232f3ec1ea27","pid":2092,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27/rootfs","created":"2024-09-16T10:47:45.396938875Z","annotations":{"io.kubernetes.cri.container-name":"storage-provisioner","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"gcr.io/k8s-minikube/storage-provisioner:v5","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","pid":1399,"status":"running","bundle":"/run/containerd/io.containerd
.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a/rootfs","created":"2024-09-16T10:47:32.722647206Z","annotations":{"io.kubernetes.cri.container-name":"kube-controller-manager","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-controller-manager:v1.31.1","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","pid":2166,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3e
e6fc0070983ddfd","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd/rootfs","created":"2024-09-16T10:47:56.156755788Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_coredns-7c65d6cfc9-6kw9d_072167c7-fa1a-463e-a957-91ea24020387","io.kubernetes.cri.sandbox-memory":"178257920","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","pid":1479,"status":"running","bundle":"/run/containerd/io
.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19/rootfs","created":"2024-09-16T10:47:32.854949973Z","annotations":{"io.kubernetes.cri.container-name":"kube-apiserver","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-apiserver:v1.31.1","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","pid":1768,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e"
,"rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e/rootfs","created":"2024-09-16T10:47:44.62156803Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"10000","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kindnet-7r2rg_ab52a601-e0fe-4f60-a202-477487da9bb2","io.kubernetes.cri.sandbox-memory":"52428800","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","pid":1733,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/c
900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1/rootfs","created":"2024-09-16T10:47:44.525242585Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-proxy-l59dx_72a26843-9f97-4121-91f0-3cb389048315","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","pid":1835,"status":"running","bundl
e":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b/rootfs","created":"2024-09-16T10:47:44.827883907Z","annotations":{"io.kubernetes.cri.container-name":"kindnet-cni","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"docker.io/kindest/kindnetd:v20240813-c6f155d6","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","pid":1295,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedb
e08802f86e","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e/rootfs","created":"2024-09-16T10:47:32.534113117Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_etcd-functional-911502_c7ab8017ca620f2ba7e026f4cdb427a2","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"}]
	I0916 10:48:03.424996 2088165 cri.go:126] list returned 16 containers
	I0916 10:48:03.425013 2088165 cri.go:129] container: {ID:11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 Status:running}
	I0916 10:48:03.425028 2088165 cri.go:135] skipping {11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 running}: state = "running", want "paused"
	I0916 10:48:03.425036 2088165 cri.go:129] container: {ID:31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 Status:running}
	I0916 10:48:03.425042 2088165 cri.go:135] skipping {31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 running}: state = "running", want "paused"
	I0916 10:48:03.425051 2088165 cri.go:129] container: {ID:334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c Status:running}
	I0916 10:48:03.425058 2088165 cri.go:131] skipping 334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c - not in ps
	I0916 10:48:03.425065 2088165 cri.go:129] container: {ID:492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 Status:running}
	I0916 10:48:03.425072 2088165 cri.go:135] skipping {492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 running}: state = "running", want "paused"
	I0916 10:48:03.425077 2088165 cri.go:129] container: {ID:51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a Status:running}
	I0916 10:48:03.425082 2088165 cri.go:131] skipping 51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a - not in ps
	I0916 10:48:03.425092 2088165 cri.go:129] container: {ID:54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f Status:running}
	I0916 10:48:03.425097 2088165 cri.go:131] skipping 54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f - not in ps
	I0916 10:48:03.425102 2088165 cri.go:129] container: {ID:578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7 Status:running}
	I0916 10:48:03.425109 2088165 cri.go:131] skipping 578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7 - not in ps
	I0916 10:48:03.425112 2088165 cri.go:129] container: {ID:57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 Status:running}
	I0916 10:48:03.425118 2088165 cri.go:135] skipping {57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 running}: state = "running", want "paused"
	I0916 10:48:03.425125 2088165 cri.go:129] container: {ID:8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 Status:running}
	I0916 10:48:03.425134 2088165 cri.go:135] skipping {8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 running}: state = "running", want "paused"
	I0916 10:48:03.425139 2088165 cri.go:129] container: {ID:928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a Status:running}
	I0916 10:48:03.425146 2088165 cri.go:135] skipping {928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a running}: state = "running", want "paused"
	I0916 10:48:03.425152 2088165 cri.go:129] container: {ID:95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd Status:running}
	I0916 10:48:03.425159 2088165 cri.go:131] skipping 95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd - not in ps
	I0916 10:48:03.425163 2088165 cri.go:129] container: {ID:a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 Status:running}
	I0916 10:48:03.425169 2088165 cri.go:135] skipping {a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 running}: state = "running", want "paused"
	I0916 10:48:03.425176 2088165 cri.go:129] container: {ID:b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e Status:running}
	I0916 10:48:03.425184 2088165 cri.go:131] skipping b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e - not in ps
	I0916 10:48:03.425189 2088165 cri.go:129] container: {ID:c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1 Status:running}
	I0916 10:48:03.425193 2088165 cri.go:131] skipping c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1 - not in ps
	I0916 10:48:03.425197 2088165 cri.go:129] container: {ID:ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b Status:running}
	I0916 10:48:03.425204 2088165 cri.go:135] skipping {ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b running}: state = "running", want "paused"
	I0916 10:48:03.425209 2088165 cri.go:129] container: {ID:e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e Status:running}
	I0916 10:48:03.425215 2088165 cri.go:131] skipping e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e - not in ps
	I0916 10:48:03.425270 2088165 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:48:03.434943 2088165 command_runner.go:130] > /var/lib/kubelet/config.yaml
	I0916 10:48:03.434970 2088165 command_runner.go:130] > /var/lib/kubelet/kubeadm-flags.env
	I0916 10:48:03.434977 2088165 command_runner.go:130] > /var/lib/minikube/etcd:
	I0916 10:48:03.434980 2088165 command_runner.go:130] > member
	I0916 10:48:03.434999 2088165 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 10:48:03.435005 2088165 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 10:48:03.435058 2088165 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 10:48:03.444332 2088165 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:48:03.444876 2088165 kubeconfig.go:125] found "functional-911502" server: "https://192.168.49.2:8441"
	I0916 10:48:03.445356 2088165 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:48:03.445657 2088165 kapi.go:59] client config for functional-911502: &rest.Config{Host:"https://192.168.49.2:8441", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(ni
l), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:48:03.446313 2088165 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 10:48:03.446395 2088165 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 10:48:03.457796 2088165 kubeadm.go:630] The running cluster does not require reconfiguration: 192.168.49.2
	I0916 10:48:03.457836 2088165 kubeadm.go:597] duration metric: took 22.82566ms to restartPrimaryControlPlane
	I0916 10:48:03.457847 2088165 kubeadm.go:394] duration metric: took 110.449114ms to StartCluster
	I0916 10:48:03.457881 2088165 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:03.457963 2088165 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:48:03.458667 2088165 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:03.458968 2088165 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:48:03.459354 2088165 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:48:03.459393 2088165 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:48:03.459499 2088165 addons.go:69] Setting storage-provisioner=true in profile "functional-911502"
	I0916 10:48:03.459522 2088165 addons.go:234] Setting addon storage-provisioner=true in "functional-911502"
	W0916 10:48:03.459531 2088165 addons.go:243] addon storage-provisioner should already be in state true
	I0916 10:48:03.459557 2088165 addons.go:69] Setting default-storageclass=true in profile "functional-911502"
	I0916 10:48:03.459594 2088165 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "functional-911502"
	I0916 10:48:03.459610 2088165 host.go:66] Checking if "functional-911502" exists ...
	I0916 10:48:03.459971 2088165 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:03.460078 2088165 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:03.463751 2088165 out.go:177] * Verifying Kubernetes components...
	I0916 10:48:03.465786 2088165 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:03.492327 2088165 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:48:03.492591 2088165 kapi.go:59] client config for functional-911502: &rest.Config{Host:"https://192.168.49.2:8441", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(ni
l), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:48:03.492857 2088165 addons.go:234] Setting addon default-storageclass=true in "functional-911502"
	W0916 10:48:03.492869 2088165 addons.go:243] addon default-storageclass should already be in state true
	I0916 10:48:03.492895 2088165 host.go:66] Checking if "functional-911502" exists ...
	I0916 10:48:03.493327 2088165 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:03.495766 2088165 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:48:03.497894 2088165 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:03.497918 2088165 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:48:03.497988 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:03.524383 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:03.535658 2088165 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:03.535679 2088165 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:48:03.535743 2088165 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:03.559290 2088165 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:03.630778 2088165 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:03.645288 2088165 node_ready.go:35] waiting up to 6m0s for node "functional-911502" to be "Ready" ...
	I0916 10:48:03.645411 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:03.645423 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.645432 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.645436 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.660535 2088165 round_trippers.go:574] Response Status: 200 OK in 15 milliseconds
	I0916 10:48:03.660564 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.660573 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.660577 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.660580 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.660583 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.660588 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.660590 2088165 round_trippers.go:580]     Audit-Id: 7d3a4638-e932-4d74-b644-bf06bf86d216
	I0916 10:48:03.660740 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:03.661523 2088165 node_ready.go:49] node "functional-911502" has status "Ready":"True"
	I0916 10:48:03.661547 2088165 node_ready.go:38] duration metric: took 16.226104ms for node "functional-911502" to be "Ready" ...
	I0916 10:48:03.661558 2088165 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:03.661622 2088165 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 10:48:03.661642 2088165 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 10:48:03.661697 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:03.661714 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.661729 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.661738 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.665791 2088165 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:48:03.665823 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.665831 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.665835 2088165 round_trippers.go:580]     Audit-Id: bd947b24-7cff-4e2d-bf6a-a19671aac2cf
	I0916 10:48:03.665838 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.665841 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.665844 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.665846 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.666612 2088165 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"424"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-6kw9d","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"072167c7-fa1a-463e-a957-91ea24020387","resourceVersion":"413","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a7b12c6a-a15b-40a7-bca2-b089a82851d2","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a7b12c6a-a15b-40a7-bca2-b089a82851d2\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 58828 chars]
	I0916 10:48:03.671275 2088165 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.671383 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-6kw9d
	I0916 10:48:03.671392 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.671401 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.671405 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.673677 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.673696 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.673704 2088165 round_trippers.go:580]     Audit-Id: 7a50e2b3-41d6-4368-b716-123bfea51442
	I0916 10:48:03.673717 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.673720 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.673723 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.673726 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.673730 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.674041 2088165 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-6kw9d","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"072167c7-fa1a-463e-a957-91ea24020387","resourceVersion":"413","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a7b12c6a-a15b-40a7-bca2-b089a82851d2","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a7b12c6a-a15b-40a7-bca2-b089a82851d2\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6481 chars]
	I0916 10:48:03.674594 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:03.674614 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.674624 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.674629 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.676727 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.676748 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.676757 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.676762 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.676765 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.676768 2088165 round_trippers.go:580]     Audit-Id: 939eac82-f066-4f20-a04c-bd069cb1f232
	I0916 10:48:03.676771 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.676773 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.677209 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:03.677592 2088165 pod_ready.go:93] pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:03.677612 2088165 pod_ready.go:82] duration metric: took 6.30474ms for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.677622 2088165 pod_ready.go:79] waiting up to 6m0s for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.677684 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/etcd-functional-911502
	I0916 10:48:03.677695 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.677703 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.677708 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.679894 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.679927 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.679935 2088165 round_trippers.go:580]     Audit-Id: 52c0bb97-d0e8-458d-95af-3a903313478c
	I0916 10:48:03.679940 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.679944 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.679947 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.679950 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.679953 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.680607 2088165 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-functional-911502","namespace":"kube-system","uid":"9cccef26-ac83-485f-a6ae-2017f0ff645b","resourceVersion":"402","creationTimestamp":"2024-09-16T10:47:37Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.49.2:2379","kubernetes.io/config.hash":"c7ab8017ca620f2ba7e026f4cdb427a2","kubernetes.io/config.mirror":"c7ab8017ca620f2ba7e026f4cdb427a2","kubernetes.io/config.seen":"2024-09-16T10:47:31.996556421Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-
client-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/confi [truncated 6445 chars]
	I0916 10:48:03.681113 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:03.681130 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.681139 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.681143 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.683628 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.683649 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.683657 2088165 round_trippers.go:580]     Audit-Id: 04d2b56a-1fb5-44bb-a456-db9c38ec437a
	I0916 10:48:03.683661 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.683664 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.683667 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.683670 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.683673 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.684541 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:03.684931 2088165 pod_ready.go:93] pod "etcd-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:03.684956 2088165 pod_ready.go:82] duration metric: took 7.326452ms for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.684970 2088165 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.685043 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-911502
	I0916 10:48:03.685054 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.685062 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.685068 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.687419 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.687440 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.687448 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.687466 2088165 round_trippers.go:580]     Audit-Id: bcc7e83b-5656-4754-9aba-3b769e3df8ca
	I0916 10:48:03.687473 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.687476 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.687482 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.687485 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.688021 2088165 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-functional-911502","namespace":"kube-system","uid":"d399bd77-51dd-4ad3-90d4-6cf11e9e156e","resourceVersion":"301","creationTimestamp":"2024-09-16T10:47:39Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.49.2:8441","kubernetes.io/config.hash":"846e81f7bcac6804cf5ef499ea5ac265","kubernetes.io/config.mirror":"846e81f7bcac6804cf5ef499ea5ac265","kubernetes.io/config.seen":"2024-09-16T10:47:39.051012852Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:39Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.ku
bernetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes [truncated 8521 chars]
	I0916 10:48:03.688559 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:03.688576 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.688584 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.688589 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.690702 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.690720 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.690728 2088165 round_trippers.go:580]     Audit-Id: 3b495a44-0202-4d14-b96c-97f07d2bc499
	I0916 10:48:03.690733 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.690753 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.690763 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.690766 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.690769 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.691282 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:03.691685 2088165 pod_ready.go:93] pod "kube-apiserver-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:03.691714 2088165 pod_ready.go:82] duration metric: took 6.734169ms for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.691726 2088165 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.691797 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-controller-manager-functional-911502
	I0916 10:48:03.691807 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.691815 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.691820 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.694012 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.694032 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.694041 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.694045 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.694048 2088165 round_trippers.go:580]     Audit-Id: 60a5cbd4-cb73-46d2-a052-ab9b16a0df91
	I0916 10:48:03.694063 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.694072 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.694075 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.694623 2088165 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-functional-911502","namespace":"kube-system","uid":"60f8d5ef-11df-400e-bce8-00ed7502b8c7","resourceVersion":"397","creationTimestamp":"2024-09-16T10:47:39Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"26c4a2e985a1c721e0411e5d9497a35b","kubernetes.io/config.mirror":"26c4a2e985a1c721e0411e5d9497a35b","kubernetes.io/config.seen":"2024-09-16T10:47:39.051014378Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:39Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes
.io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{"." [truncated 8096 chars]
	I0916 10:48:03.695213 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:03.695233 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.695242 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.695246 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.697778 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.697799 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.697808 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.697813 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.697816 2088165 round_trippers.go:580]     Audit-Id: d25b7667-3d27-4852-a1db-acd4e3def388
	I0916 10:48:03.697819 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.697821 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.697824 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.698450 2088165 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:03.699849 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:03.700223 2088165 pod_ready.go:93] pod "kube-controller-manager-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:03.700239 2088165 pod_ready.go:82] duration metric: took 8.504824ms for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.700250 2088165 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:03.717382 2088165 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:03.845928 2088165 request.go:632] Waited for 145.611246ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-proxy-l59dx
	I0916 10:48:03.846018 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-proxy-l59dx
	I0916 10:48:03.846043 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:03.846052 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:03.846063 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:03.848622 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:03.848646 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:03.848655 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:03.848659 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:03 GMT
	I0916 10:48:03.848663 2088165 round_trippers.go:580]     Audit-Id: 56857d5c-2cf6-48d4-a5e5-52df385277e0
	I0916 10:48:03.848674 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:03.848677 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:03.848681 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:03.849192 2088165 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-l59dx","generateName":"kube-proxy-","namespace":"kube-system","uid":"72a26843-9f97-4121-91f0-3cb389048315","resourceVersion":"381","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"35f9ba26-390b-4032-b0bf-72d6e30119ee","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"35f9ba26-390b-4032-b0bf-72d6e30119ee\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6177 chars]
	I0916 10:48:04.046546 2088165 request.go:632] Waited for 196.765909ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:04.046620 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:04.046630 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.046639 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.046653 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.049353 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.049430 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.049453 2088165 round_trippers.go:580]     Audit-Id: 84721d2e-7d21-4d43-a34b-77d002ffe4f4
	I0916 10:48:04.049472 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.049569 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.049597 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.049614 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.049630 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.049811 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:04.050261 2088165 pod_ready.go:93] pod "kube-proxy-l59dx" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:04.050286 2088165 pod_ready.go:82] duration metric: took 350.026689ms for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:04.050299 2088165 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:04.127576 2088165 command_runner.go:130] > serviceaccount/storage-provisioner unchanged
	I0916 10:48:04.156134 2088165 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner unchanged
	I0916 10:48:04.180061 2088165 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner unchanged
	I0916 10:48:04.203294 2088165 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner unchanged
	I0916 10:48:04.245493 2088165 request.go:632] Waited for 195.09035ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-911502
	I0916 10:48:04.245592 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-911502
	I0916 10:48:04.245606 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.245616 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.245639 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.248067 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.248137 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.248159 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.248179 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.248193 2088165 round_trippers.go:580]     Audit-Id: 53f981f0-e971-4733-bf38-42a727012b1f
	I0916 10:48:04.248219 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.248235 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.248249 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.249038 2088165 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-functional-911502","namespace":"kube-system","uid":"7da8ecbb-189d-4ed2-bcbe-69ef483b67e8","resourceVersion":"359","creationTimestamp":"2024-09-16T10:47:39Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"69dae9ff35c780f43a15f539d6f19e46","kubernetes.io/config.mirror":"69dae9ff35c780f43a15f539d6f19e46","kubernetes.io/config.seen":"2024-09-16T10:47:39.051015494Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:39Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{
},"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component": [truncated 4978 chars]
	I0916 10:48:04.313333 2088165 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath unchanged
	I0916 10:48:04.432371 2088165 command_runner.go:130] > pod/storage-provisioner configured
	I0916 10:48:04.437599 2088165 command_runner.go:130] > storageclass.storage.k8s.io/standard unchanged
	I0916 10:48:04.437721 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/apis/storage.k8s.io/v1/storageclasses
	I0916 10:48:04.437734 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.437744 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.437749 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.440211 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.440235 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.440244 2088165 round_trippers.go:580]     Content-Length: 1273
	I0916 10:48:04.440250 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.440253 2088165 round_trippers.go:580]     Audit-Id: 6c394b0f-6b8a-4424-8a7f-4ad929b8b1e5
	I0916 10:48:04.440256 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.440259 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.440274 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.440281 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.440348 2088165 request.go:1351] Response Body: {"kind":"StorageClassList","apiVersion":"storage.k8s.io/v1","metadata":{"resourceVersion":"424"},"items":[{"metadata":{"name":"standard","uid":"09dcbef5-fa0f-4e66-b848-ffef0edc1433","resourceVersion":"340","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kuberne
tes.io/last-applied-configuration":{},"f:storageclass.kubernetes.io/is- [truncated 249 chars]
	I0916 10:48:04.440831 2088165 request.go:1351] Request Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"09dcbef5-fa0f-4e66-b848-ffef0edc1433","resourceVersion":"340","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storageclas
s.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 10:48:04.440888 2088165 round_trippers.go:463] PUT https://192.168.49.2:8441/apis/storage.k8s.io/v1/storageclasses/standard
	I0916 10:48:04.440899 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.440907 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.440913 2088165 round_trippers.go:473]     Content-Type: application/json
	I0916 10:48:04.440924 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.443808 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.443831 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.443840 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.443844 2088165 round_trippers.go:580]     Audit-Id: b8d9cfe0-12be-4fe1-9c8a-fc402f8daf5b
	I0916 10:48:04.443848 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.443868 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.443877 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.443880 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.443883 2088165 round_trippers.go:580]     Content-Length: 1220
	I0916 10:48:04.443917 2088165 request.go:1351] Response Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"09dcbef5-fa0f-4e66-b848-ffef0edc1433","resourceVersion":"340","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storagecla
ss.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 10:48:04.445980 2088165 request.go:632] Waited for 196.425792ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:04.446033 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes/functional-911502
	I0916 10:48:04.446044 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.446053 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.446057 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.446920 2088165 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 10:48:04.448570 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.448589 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.448597 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.448602 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.448605 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.448608 2088165 round_trippers.go:580]     Audit-Id: a457554d-37d8-4e36-99a3-a36abe6cba96
	I0916 10:48:04.448611 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.448613 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.448739 2088165 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Upd
ate","apiVersion":"v1","time":"2024-09-16T10:47:36Z","fieldsType":"Fiel [truncated 5107 chars]
	I0916 10:48:04.448976 2088165 addons.go:510] duration metric: took 989.576689ms for enable addons: enabled=[storage-provisioner default-storageclass]
	I0916 10:48:04.449161 2088165 pod_ready.go:93] pod "kube-scheduler-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:04.449175 2088165 pod_ready.go:82] duration metric: took 398.850709ms for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:04.449185 2088165 pod_ready.go:39] duration metric: took 787.616557ms for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:04.449204 2088165 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:48:04.449265 2088165 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:48:04.459023 2088165 command_runner.go:130] > 1479
	I0916 10:48:04.460316 2088165 api_server.go:72] duration metric: took 1.001309547s to wait for apiserver process to appear ...
	I0916 10:48:04.460338 2088165 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:48:04.460359 2088165 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:04.468569 2088165 api_server.go:279] https://192.168.49.2:8441/healthz returned 200:
	ok
	I0916 10:48:04.468653 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/version
	I0916 10:48:04.468664 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.468675 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.468678 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.470099 2088165 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:48:04.470133 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.470140 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.470146 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.470149 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.470153 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.470155 2088165 round_trippers.go:580]     Content-Length: 263
	I0916 10:48:04.470158 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.470161 2088165 round_trippers.go:580]     Audit-Id: 3d925272-5044-4fbe-9e45-9b830c352d6d
	I0916 10:48:04.470177 2088165 request.go:1351] Response Body: {
	  "major": "1",
	  "minor": "31",
	  "gitVersion": "v1.31.1",
	  "gitCommit": "948afe5ca072329a73c8e79ed5938717a5cb3d21",
	  "gitTreeState": "clean",
	  "buildDate": "2024-09-11T21:22:08Z",
	  "goVersion": "go1.22.6",
	  "compiler": "gc",
	  "platform": "linux/arm64"
	}
	I0916 10:48:04.470300 2088165 api_server.go:141] control plane version: v1.31.1
	I0916 10:48:04.470320 2088165 api_server.go:131] duration metric: took 9.975609ms to wait for apiserver health ...
	I0916 10:48:04.470332 2088165 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:48:04.645543 2088165 request.go:632] Waited for 175.134103ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:04.645629 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:04.645642 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.645651 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.645657 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.648503 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.648539 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.648549 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.648555 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.648559 2088165 round_trippers.go:580]     Audit-Id: 276c41ab-f18c-4cd0-b778-e7e69343c450
	I0916 10:48:04.648561 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.648564 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.648567 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.649675 2088165 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"424"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-6kw9d","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"072167c7-fa1a-463e-a957-91ea24020387","resourceVersion":"413","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a7b12c6a-a15b-40a7-bca2-b089a82851d2","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a7b12c6a-a15b-40a7-bca2-b089a82851d2\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 58828 chars]
	I0916 10:48:04.652245 2088165 system_pods.go:59] 8 kube-system pods found
	I0916 10:48:04.652287 2088165 system_pods.go:61] "coredns-7c65d6cfc9-6kw9d" [072167c7-fa1a-463e-a957-91ea24020387] Running
	I0916 10:48:04.652297 2088165 system_pods.go:61] "etcd-functional-911502" [9cccef26-ac83-485f-a6ae-2017f0ff645b] Running
	I0916 10:48:04.652302 2088165 system_pods.go:61] "kindnet-7r2rg" [ab52a601-e0fe-4f60-a202-477487da9bb2] Running
	I0916 10:48:04.652315 2088165 system_pods.go:61] "kube-apiserver-functional-911502" [d399bd77-51dd-4ad3-90d4-6cf11e9e156e] Running
	I0916 10:48:04.652320 2088165 system_pods.go:61] "kube-controller-manager-functional-911502" [60f8d5ef-11df-400e-bce8-00ed7502b8c7] Running
	I0916 10:48:04.652324 2088165 system_pods.go:61] "kube-proxy-l59dx" [72a26843-9f97-4121-91f0-3cb389048315] Running
	I0916 10:48:04.652329 2088165 system_pods.go:61] "kube-scheduler-functional-911502" [7da8ecbb-189d-4ed2-bcbe-69ef483b67e8] Running
	I0916 10:48:04.652337 2088165 system_pods.go:61] "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running
	I0916 10:48:04.652343 2088165 system_pods.go:74] duration metric: took 182.003926ms to wait for pod list to return data ...
	I0916 10:48:04.652354 2088165 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:48:04.845838 2088165 request.go:632] Waited for 193.389585ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/namespaces/default/serviceaccounts
	I0916 10:48:04.845922 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/default/serviceaccounts
	I0916 10:48:04.845934 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:04.845943 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:04.845950 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:04.848728 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:04.848753 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:04.848763 2088165 round_trippers.go:580]     Audit-Id: b1d51c6d-03dd-45c8-99f8-5de75e4ae5eb
	I0916 10:48:04.848768 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:04.848772 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:04.848776 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:04.848779 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:04.848781 2088165 round_trippers.go:580]     Content-Length: 261
	I0916 10:48:04.848784 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:04 GMT
	I0916 10:48:04.848844 2088165 request.go:1351] Response Body: {"kind":"ServiceAccountList","apiVersion":"v1","metadata":{"resourceVersion":"424"},"items":[{"metadata":{"name":"default","namespace":"default","uid":"edf53d44-fa4f-4597-be01-9bceb291c079","resourceVersion":"304","creationTimestamp":"2024-09-16T10:47:43Z"}}]}
	I0916 10:48:04.849067 2088165 default_sa.go:45] found service account: "default"
	I0916 10:48:04.849091 2088165 default_sa.go:55] duration metric: took 196.730906ms for default service account to be created ...
	I0916 10:48:04.849104 2088165 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:48:05.045482 2088165 request.go:632] Waited for 196.308672ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:05.045591 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/namespaces/kube-system/pods
	I0916 10:48:05.045604 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:05.045613 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.045632 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.048822 2088165 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:48:05.048888 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:05.048910 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.048928 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:05.048943 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:05.048971 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.048993 2088165 round_trippers.go:580]     Audit-Id: 8a307c14-81a7-4b63-89d0-6e81f4952347
	I0916 10:48:05.049008 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.049540 2088165 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"424"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-6kw9d","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"072167c7-fa1a-463e-a957-91ea24020387","resourceVersion":"413","creationTimestamp":"2024-09-16T10:47:44Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"a7b12c6a-a15b-40a7-bca2-b089a82851d2","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T10:47:44Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"a7b12c6a-a15b-40a7-bca2-b089a82851d2\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 58828 chars]
	I0916 10:48:05.052262 2088165 system_pods.go:86] 8 kube-system pods found
	I0916 10:48:05.052310 2088165 system_pods.go:89] "coredns-7c65d6cfc9-6kw9d" [072167c7-fa1a-463e-a957-91ea24020387] Running
	I0916 10:48:05.052318 2088165 system_pods.go:89] "etcd-functional-911502" [9cccef26-ac83-485f-a6ae-2017f0ff645b] Running
	I0916 10:48:05.052323 2088165 system_pods.go:89] "kindnet-7r2rg" [ab52a601-e0fe-4f60-a202-477487da9bb2] Running
	I0916 10:48:05.052329 2088165 system_pods.go:89] "kube-apiserver-functional-911502" [d399bd77-51dd-4ad3-90d4-6cf11e9e156e] Running
	I0916 10:48:05.052335 2088165 system_pods.go:89] "kube-controller-manager-functional-911502" [60f8d5ef-11df-400e-bce8-00ed7502b8c7] Running
	I0916 10:48:05.052340 2088165 system_pods.go:89] "kube-proxy-l59dx" [72a26843-9f97-4121-91f0-3cb389048315] Running
	I0916 10:48:05.052344 2088165 system_pods.go:89] "kube-scheduler-functional-911502" [7da8ecbb-189d-4ed2-bcbe-69ef483b67e8] Running
	I0916 10:48:05.052350 2088165 system_pods.go:89] "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running
	I0916 10:48:05.052359 2088165 system_pods.go:126] duration metric: took 203.244785ms to wait for k8s-apps to be running ...
	I0916 10:48:05.052370 2088165 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:48:05.052438 2088165 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:48:05.064597 2088165 system_svc.go:56] duration metric: took 12.217908ms WaitForService to wait for kubelet
	I0916 10:48:05.064684 2088165 kubeadm.go:582] duration metric: took 1.605680381s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:48:05.064710 2088165 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:48:05.246151 2088165 request.go:632] Waited for 181.330182ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8441/api/v1/nodes
	I0916 10:48:05.246216 2088165 round_trippers.go:463] GET https://192.168.49.2:8441/api/v1/nodes
	I0916 10:48:05.246223 2088165 round_trippers.go:469] Request Headers:
	I0916 10:48:05.246232 2088165 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:48:05.246242 2088165 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:48:05.248710 2088165 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:48:05.248735 2088165 round_trippers.go:577] Response Headers:
	I0916 10:48:05.248744 2088165 round_trippers.go:580]     Content-Type: application/json
	I0916 10:48:05.248748 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 2195ecf9-ccbd-416a-bcf1-e3903340ca44
	I0916 10:48:05.248751 2088165 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: b1fdf380-80ee-4488-90a1-0bf453c0d959
	I0916 10:48:05.248754 2088165 round_trippers.go:580]     Date: Mon, 16 Sep 2024 10:48:05 GMT
	I0916 10:48:05.248757 2088165 round_trippers.go:580]     Audit-Id: fdca99b8-ca8f-4880-bfe3-aa5f1a024c7b
	I0916 10:48:05.248759 2088165 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 10:48:05.248955 2088165 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"424"},"items":[{"metadata":{"name":"functional-911502","uid":"54f5a073-de63-45af-94a0-bc145f0d2e2f","resourceVersion":"400","creationTimestamp":"2024-09-16T10:47:36Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"functional-911502","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"functional-911502","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T10_47_40_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"ma
nagedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v [truncated 5160 chars]
	I0916 10:48:05.249387 2088165 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:48:05.249421 2088165 node_conditions.go:123] node cpu capacity is 2
	I0916 10:48:05.249432 2088165 node_conditions.go:105] duration metric: took 184.716467ms to run NodePressure ...
	I0916 10:48:05.249448 2088165 start.go:241] waiting for startup goroutines ...
	I0916 10:48:05.249463 2088165 start.go:246] waiting for cluster config update ...
	I0916 10:48:05.249474 2088165 start.go:255] writing updated cluster config ...
	I0916 10:48:05.249777 2088165 ssh_runner.go:195] Run: rm -f paused
	I0916 10:48:05.256548 2088165 out.go:177] * Done! kubectl is now configured to use "functional-911502" cluster and "default" namespace by default
	E0916 10:48:05.259198 2088165 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	11757969f67eb       2f6c962e7b831       12 seconds ago      Running             coredns                   0                   95b6f0353b091       coredns-7c65d6cfc9-6kw9d
	8aede76947864       ba04bb24b9575       23 seconds ago      Running             storage-provisioner       0                   334ec243859df       storage-provisioner
	ce5a28d1cb5d2       6a23fa8fd2b78       23 seconds ago      Running             kindnet-cni               0                   b400f9b4bc923       kindnet-7r2rg
	57c3cd94d0c59       24a140c548c07       23 seconds ago      Running             kube-proxy                0                   c900cfd22280f       kube-proxy-l59dx
	a27dc93745c62       d3f53a98c0a9d       35 seconds ago      Running             kube-apiserver            0                   51fb442d9d3e0       kube-apiserver-functional-911502
	492408bc37d38       27e3830e14027       35 seconds ago      Running             etcd                      0                   e43a7a67672f1       etcd-functional-911502
	31265291ac7da       7f8aa378bb47d       35 seconds ago      Running             kube-scheduler            0                   578f22ca4016c       kube-scheduler-functional-911502
	928f2c64d0a66       279f381cb3736       35 seconds ago      Running             kube-controller-manager   0                   54de1abbce22f       kube-controller-manager-functional-911502
	
	
	==> containerd <==
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.318999212Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319012922Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319080089Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319102842Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319116610Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319130665Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319142275Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319161590Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319174160Z" level=info msg="NRI interface is disabled by configuration."
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319185828Z" level=info msg="loading plugin \"io.containerd.grpc.v1.cri\"..." type=io.containerd.grpc.v1
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319497194Z" level=info msg="Start cri plugin with config {PluginConfig:{ContainerdConfig:{Snapshotter:overlayfs DefaultRuntimeName:runc DefaultRuntime:{Type: Path: Engine: PodAnnotations:[] ContainerAnnotations:[] Root: Options:map[] PrivilegedWithoutHostDevices:false PrivilegedWithoutHostDevicesAllDevicesAllowed:false BaseRuntimeSpec: NetworkPluginConfDir: NetworkPluginMaxConfNum:0 Snapshotter: SandboxMode:} UntrustedWorkloadRuntime:{Type: Path: Engine: PodAnnotations:[] ContainerAnnotations:[] Root: Options:map[] PrivilegedWithoutHostDevices:false PrivilegedWithoutHostDevicesAllDevicesAllowed:false BaseRuntimeSpec: NetworkPluginConfDir: NetworkPluginMaxConfNum:0 Snapshotter: SandboxMode:} Runtimes:map[runc:{Type:io.containerd.runc.v2 Path: Engine: PodAnnotations:[] ContainerAnnotations:[] Root: Options:map[SystemdCgroup:false] PrivilegedWithoutHostDevices:false PrivilegedWithoutHostDevicesAllDevicesAllowed:false BaseRunti
meSpec: NetworkPluginConfDir: NetworkPluginMaxConfNum:0 Snapshotter: SandboxMode:podsandbox}] NoPivot:false DisableSnapshotAnnotations:true DiscardUnpackedLayers:true IgnoreBlockIONotEnabledErrors:false IgnoreRdtNotEnabledErrors:false} CniConfig:{NetworkPluginBinDir:/opt/cni/bin NetworkPluginConfDir:/etc/cni/net.d NetworkPluginMaxConfNum:1 NetworkPluginSetupSerially:false NetworkPluginConfTemplate: IPPreference:} Registry:{ConfigPath:/etc/containerd/certs.d Mirrors:map[] Configs:map[] Auths:map[] Headers:map[]} ImageDecryption:{KeyModel:node} DisableTCPService:true StreamServerAddress: StreamServerPort:10010 StreamIdleTimeout:4h0m0s EnableSelinux:false SelinuxCategoryRange:1024 SandboxImage:registry.k8s.io/pause:3.10 StatsCollectPeriod:10 SystemdCgroup:false EnableTLSStreaming:false X509KeyPairStreaming:{TLSCertFile: TLSKeyFile:} MaxContainerLogLineSize:16384 DisableCgroup:false DisableApparmor:false RestrictOOMScoreAdj:false MaxConcurrentDownloads:3 DisableProcMount:false UnsetSeccompProfile: TolerateMissing
HugetlbController:true DisableHugetlbController:true DeviceOwnershipFromSecurityContext:false IgnoreImageDefinedVolumes:false NetNSMountsUnderStateDir:false EnableUnprivilegedPorts:true EnableUnprivilegedICMP:false EnableCDI:false CDISpecDirs:[/etc/cdi /var/run/cdi] ImagePullProgressTimeout:5m0s DrainExecSyncIOTimeout:0s ImagePullWithSyncFs:false IgnoreDeprecationWarnings:[]} ContainerdRootDir:/var/lib/containerd ContainerdEndpoint:/run/containerd/containerd.sock RootDir:/var/lib/containerd/io.containerd.grpc.v1.cri StateDir:/run/containerd/io.containerd.grpc.v1.cri}"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319559044Z" level=info msg="Connect containerd service"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319595951Z" level=info msg="using legacy CRI server"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319604517Z" level=info msg="using experimental NRI integration - disable nri plugin to prevent this"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.319695421Z" level=info msg="Get image filesystem path \"/var/lib/containerd/io.containerd.snapshotter.v1.overlayfs\""
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.320725355Z" level=info msg=serving... address=/run/containerd/containerd.sock.ttrpc
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.320786015Z" level=info msg=serving... address=/run/containerd/containerd.sock
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.321270680Z" level=info msg="Start subscribing containerd event"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.321325367Z" level=info msg="Start recovering state"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.416492174Z" level=info msg="Start event monitor"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.416555681Z" level=info msg="Start snapshots syncer"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.416569178Z" level=info msg="Start cni network conf syncer for default"
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.416579574Z" level=info msg="Start streaming server"
	Sep 16 10:48:02 functional-911502 systemd[1]: Started containerd container runtime.
	Sep 16 10:48:02 functional-911502 containerd[2468]: time="2024-09-16T10:48:02.422346988Z" level=info msg="containerd successfully booted in 0.264155s"
	
	
	==> coredns [11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39497 - 35637 "HINFO IN 756688810597303784.5152931065563193714. udp 56 false 512" NXDOMAIN qr,rd,ra 56 0.040061481s
	
	
	==> describe nodes <==
	Name:               functional-911502
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-911502
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-911502
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_40_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:47:36 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-911502
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:47:59 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:47:49 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:47:49 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:47:49 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:47:49 +0000   Mon, 16 Sep 2024 10:47:36 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-911502
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 43a21049ac0d40628479cf884a8089e0
	  System UUID:                2830f6a5-4b63-46c5-b24a-468e4df19b79
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-6kw9d                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     24s
	  kube-system                 etcd-functional-911502                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         31s
	  kube-system                 kindnet-7r2rg                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      24s
	  kube-system                 kube-apiserver-functional-911502             250m (12%)    0 (0%)      0 (0%)           0 (0%)         29s
	  kube-system                 kube-controller-manager-functional-911502    200m (10%)    0 (0%)      0 (0%)           0 (0%)         29s
	  kube-system                 kube-proxy-l59dx                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         24s
	  kube-system                 kube-scheduler-functional-911502             100m (5%)     0 (0%)      0 (0%)           0 (0%)         29s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         24s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age   From             Message
	  ----     ------                   ----  ----             -------
	  Normal   Starting                 23s   kube-proxy       
	  Normal   Starting                 29s   kubelet          Starting kubelet.
	  Warning  CgroupV1                 29s   kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  29s   kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  29s   kubelet          Node functional-911502 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    29s   kubelet          Node functional-911502 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     29s   kubelet          Node functional-911502 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           25s   node-controller  Node functional-911502 event: Registered Node functional-911502 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31] <==
	{"level":"info","ts":"2024-09-16T10:47:33.131560Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:47:33.139317Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:47:33.139485Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:47:33.138740Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:47:33.139730Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:47:33.254739Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T10:47:33.254930Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T10:47:33.255031Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 1"}
	{"level":"info","ts":"2024-09-16T10:47:33.255134Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.255208Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.255286Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.255359Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.262769Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.270884Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-911502 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:47:33.271109Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.271402Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.272620Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.283805Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.271431Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284172Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284977Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.289591Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.306735Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306879Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306916Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	
	
	==> kernel <==
	 10:48:08 up 1 day, 14:30,  0 users,  load average: 2.58, 1.30, 1.39
	Linux functional-911502 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b] <==
	I0916 10:47:45.041351       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:47:45.041663       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:47:45.041804       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:47:45.041819       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:47:45.041830       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:47:45.520963       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:47:45.521152       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:47:45.521205       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:47:45.722171       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:47:45.722199       1 metrics.go:61] Registering metrics
	I0916 10:47:45.722266       1 controller.go:374] Syncing nftables rules
	I0916 10:47:55.524822       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:47:55.524859       1 main.go:299] handling current node
	I0916 10:48:05.526968       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:05.527019       1 main.go:299] handling current node
	
	
	==> kube-apiserver [a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19] <==
	I0916 10:47:36.526042       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:47:36.526212       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	E0916 10:47:36.567945       1 controller.go:145] "Failed to ensure lease exists, will retry" err="namespaces \"kube-system\" not found" interval="200ms"
	E0916 10:47:36.572000       1 controller.go:148] "Unhandled Error" err="while syncing ConfigMap \"kube-system/kube-apiserver-legacy-service-account-token-tracking\", err: namespaces \"kube-system\" not found" logger="UnhandledError"
	I0916 10:47:36.577644       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:47:36.580467       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:47:36.580621       1 policy_source.go:224] refreshing policies
	I0916 10:47:36.603683       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 10:47:36.607429       1 controller.go:615] quota admission added evaluator for: namespaces
	I0916 10:47:36.774829       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:47:37.212486       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0916 10:47:37.220769       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0916 10:47:37.221471       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 10:47:37.822723       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:47:37.869935       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 10:47:38.023960       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 10:47:38.032609       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2]
	I0916 10:47:38.034010       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:47:38.039301       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 10:47:38.366073       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:47:39.181277       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:47:39.195876       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 10:47:39.209196       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:47:43.921976       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0916 10:47:44.089159       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	
	
	==> kube-controller-manager [928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a] <==
	I0916 10:47:43.260717       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:47:43.260741       1 shared_informer.go:320] Caches are synced for legacy-service-account-token-cleaner
	I0916 10:47:43.260789       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 10:47:43.264947       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:47:43.270925       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:43.274016       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:47:43.289621       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 10:47:43.309403       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:47:43.755815       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810781       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810815       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:47:43.822097       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:44.310052       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="384.204702ms"
	I0916 10:47:44.362819       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="52.71544ms"
	I0916 10:47:44.426732       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="63.859952ms"
	I0916 10:47:44.459758       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="32.950043ms"
	I0916 10:47:44.479101       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="19.296296ms"
	I0916 10:47:44.479182       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.386µs"
	I0916 10:47:46.277218       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="54.999µs"
	I0916 10:47:46.284221       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="47.499µs"
	I0916 10:47:46.289165       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.016µs"
	I0916 10:47:49.776695       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:56.302801       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="57.165µs"
	I0916 10:47:57.316220       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="18.588165ms"
	I0916 10:47:57.316368       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="102.169µs"
	
	
	==> kube-proxy [57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6] <==
	I0916 10:47:44.794046       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:47:44.895525       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:47:44.895614       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:47:44.916570       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:47:44.916637       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:47:44.918597       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:47:44.919425       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:47:44.919452       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:47:44.925419       1 config.go:199] "Starting service config controller"
	I0916 10:47:44.925472       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:47:44.925521       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:47:44.925531       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:47:44.928903       1 config.go:328] "Starting node config controller"
	I0916 10:47:44.928927       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:47:45.030179       1 shared_informer.go:320] Caches are synced for node config
	I0916 10:47:45.030253       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:47:45.030287       1 shared_informer.go:320] Caches are synced for endpoint slice config
	
	
	==> kube-scheduler [31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44] <==
	W0916 10:47:37.419477       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0916 10:47:37.419493       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419534       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:47:37.419548       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419590       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 10:47:37.419607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419645       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419660       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419704       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419719       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422428       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:47:37.422472       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422530       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:47:37.422547       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422779       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 10:47:37.422805       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	W0916 10:47:37.422862       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:47:37.422883       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424481       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.424516       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424588       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:47:37.424607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424692       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:47:37.424724       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	I0916 10:47:38.910602       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 10:47:44 functional-911502 kubelet[1540]: I0916 10:47:44.361582    1540 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 10:47:44 functional-911502 kubelet[1540]: E0916 10:47:44.392120    1540 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config-volume kube-api-access-qfzm8], unattached volumes=[], failed to process volumes=[]: context canceled" pod="kube-system/coredns-7c65d6cfc9-ks82c" podUID="0dc23912-43e2-42db-a49b-97f879c4f7b3"
	Sep 16 10:47:44 functional-911502 kubelet[1540]: I0916 10:47:44.729530    1540 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dzjt\" (UniqueName: \"kubernetes.io/projected/ecac562d-8318-4226-b5f1-61f2c76bb51b-kube-api-access-6dzjt\") pod \"storage-provisioner\" (UID: \"ecac562d-8318-4226-b5f1-61f2c76bb51b\") " pod="kube-system/storage-provisioner"
	Sep 16 10:47:44 functional-911502 kubelet[1540]: I0916 10:47:44.729588    1540 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/ecac562d-8318-4226-b5f1-61f2c76bb51b-tmp\") pod \"storage-provisioner\" (UID: \"ecac562d-8318-4226-b5f1-61f2c76bb51b\") " pod="kube-system/storage-provisioner"
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.289061    1540 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kindnet-7r2rg" podStartSLOduration=1.289039949 podStartE2EDuration="1.289039949s" podCreationTimestamp="2024-09-16 10:47:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:47:45.250464079 +0000 UTC m=+6.287018312" watchObservedRunningTime="2024-09-16 10:47:45.289039949 +0000 UTC m=+6.325594190"
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.289338    1540 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-proxy-l59dx" podStartSLOduration=1.289330753 podStartE2EDuration="1.289330753s" podCreationTimestamp="2024-09-16 10:47:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:47:45.288652505 +0000 UTC m=+6.325206746" watchObservedRunningTime="2024-09-16 10:47:45.289330753 +0000 UTC m=+6.325884994"
	Sep 16 10:47:45 functional-911502 kubelet[1540]: E0916 10:47:45.327027    1540 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\": failed to find network info for sandbox \"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\""
	Sep 16 10:47:45 functional-911502 kubelet[1540]: E0916 10:47:45.327169    1540 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\": failed to find network info for sandbox \"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\"" pod="kube-system/coredns-7c65d6cfc9-6kw9d"
	Sep 16 10:47:45 functional-911502 kubelet[1540]: E0916 10:47:45.327193    1540 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\": failed to find network info for sandbox \"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\"" pod="kube-system/coredns-7c65d6cfc9-6kw9d"
	Sep 16 10:47:45 functional-911502 kubelet[1540]: E0916 10:47:45.327263    1540 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-7c65d6cfc9-6kw9d_kube-system(072167c7-fa1a-463e-a957-91ea24020387)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-7c65d6cfc9-6kw9d_kube-system(072167c7-fa1a-463e-a957-91ea24020387)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\\\": failed to find network info for sandbox \\\"f0148ea8f08ac7033418e4fa177ba42f095774afe8559fd0119146564cbe55e0\\\"\"" pod="kube-system/coredns-7c65d6cfc9-6kw9d" podUID="072167c7-fa1a-463e-a957-91ea24020387"
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.341081    1540 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfzm8\" (UniqueName: \"kubernetes.io/projected/0dc23912-43e2-42db-a49b-97f879c4f7b3-kube-api-access-qfzm8\") pod \"0dc23912-43e2-42db-a49b-97f879c4f7b3\" (UID: \"0dc23912-43e2-42db-a49b-97f879c4f7b3\") "
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.341496    1540 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0dc23912-43e2-42db-a49b-97f879c4f7b3-config-volume\") pod \"0dc23912-43e2-42db-a49b-97f879c4f7b3\" (UID: \"0dc23912-43e2-42db-a49b-97f879c4f7b3\") "
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.345768    1540 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0dc23912-43e2-42db-a49b-97f879c4f7b3-config-volume" (OuterVolumeSpecName: "config-volume") pod "0dc23912-43e2-42db-a49b-97f879c4f7b3" (UID: "0dc23912-43e2-42db-a49b-97f879c4f7b3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.347205    1540 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dc23912-43e2-42db-a49b-97f879c4f7b3-kube-api-access-qfzm8" (OuterVolumeSpecName: "kube-api-access-qfzm8") pod "0dc23912-43e2-42db-a49b-97f879c4f7b3" (UID: "0dc23912-43e2-42db-a49b-97f879c4f7b3"). InnerVolumeSpecName "kube-api-access-qfzm8". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.442441    1540 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-qfzm8\" (UniqueName: \"kubernetes.io/projected/0dc23912-43e2-42db-a49b-97f879c4f7b3-kube-api-access-qfzm8\") on node \"functional-911502\" DevicePath \"\""
	Sep 16 10:47:45 functional-911502 kubelet[1540]: I0916 10:47:45.442490    1540 reconciler_common.go:288] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0dc23912-43e2-42db-a49b-97f879c4f7b3-config-volume\") on node \"functional-911502\" DevicePath \"\""
	Sep 16 10:47:47 functional-911502 kubelet[1540]: I0916 10:47:47.080115    1540 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dc23912-43e2-42db-a49b-97f879c4f7b3" path="/var/lib/kubelet/pods/0dc23912-43e2-42db-a49b-97f879c4f7b3/volumes"
	Sep 16 10:47:47 functional-911502 kubelet[1540]: I0916 10:47:47.380422    1540 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/storage-provisioner" podStartSLOduration=3.380393704 podStartE2EDuration="3.380393704s" podCreationTimestamp="2024-09-16 10:47:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:47:46.302196758 +0000 UTC m=+7.338751015" watchObservedRunningTime="2024-09-16 10:47:47.380393704 +0000 UTC m=+8.416947937"
	Sep 16 10:47:49 functional-911502 kubelet[1540]: I0916 10:47:49.759905    1540 kuberuntime_manager.go:1635] "Updating runtime config through cri with podcidr" CIDR="10.244.0.0/24"
	Sep 16 10:47:49 functional-911502 kubelet[1540]: I0916 10:47:49.761214    1540 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Sep 16 10:47:57 functional-911502 kubelet[1540]: I0916 10:47:57.295790    1540 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/coredns-7c65d6cfc9-6kw9d" podStartSLOduration=13.295770937 podStartE2EDuration="13.295770937s" podCreationTimestamp="2024-09-16 10:47:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:47:56.309986989 +0000 UTC m=+17.346541230" watchObservedRunningTime="2024-09-16 10:47:57.295770937 +0000 UTC m=+18.332325178"
	Sep 16 10:48:02 functional-911502 kubelet[1540]: W0916 10:48:02.313775    1540 logging.go:55] [core] [Channel #1 SubChannel #2]grpc: addrConn.createTransport failed to connect to {Addr: "/run/containerd/containerd.sock", ServerName: "localhost", }. Err: connection error: desc = "transport: Error while dialing: dial unix /run/containerd/containerd.sock: connect: no such file or directory"
	Sep 16 10:48:02 functional-911502 kubelet[1540]: E0916 10:48:02.313866    1540 log.go:32] "ListPodSandbox with filter from runtime service failed" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial unix /run/containerd/containerd.sock: connect: no such file or directory\"" filter="nil"
	Sep 16 10:48:02 functional-911502 kubelet[1540]: E0916 10:48:02.313905    1540 kuberuntime_sandbox.go:305] "Failed to list pod sandboxes" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial unix /run/containerd/containerd.sock: connect: no such file or directory\""
	Sep 16 10:48:02 functional-911502 kubelet[1540]: E0916 10:48:02.313919    1540 generic.go:238] "GenericPLEG: Unable to retrieve pods" err="rpc error: code = Unavailable desc = connection error: desc = \"transport: Error while dialing: dial unix /run/containerd/containerd.sock: connect: no such file or directory\""
	
	
	==> storage-provisioner [8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27] <==
	I0916 10:47:45.429681       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:47:45.445556       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:47:45.446351       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:47:45.454850       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:47:45.455177       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-911502_5b879e6d-55a8-4fac-8967-8695891e6ebb!
	I0916 10:47:45.456424       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"28a802e4-0156-4c92-adef-4d6f2592a206", APIVersion:"v1", ResourceVersion:"387", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-911502_5b879e6d-55a8-4fac-8967-8695891e6ebb became leader
	I0916 10:47:45.555346       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-911502_5b879e6d-55a8-4fac-8967-8695891e6ebb!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-911502 -n functional-911502
helpers_test.go:261: (dbg) Run:  kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (514.319µs)
helpers_test.go:263: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/serial/KubectlGetPods (2.25s)

                                                
                                    
x
+
TestFunctional/serial/ComponentHealth (2.87s)

                                                
                                                
=== RUN   TestFunctional/serial/ComponentHealth
functional_test.go:810: (dbg) Run:  kubectl --context functional-911502 get po -l tier=control-plane -n kube-system -o=json
functional_test.go:810: (dbg) Non-zero exit: kubectl --context functional-911502 get po -l tier=control-plane -n kube-system -o=json: fork/exec /usr/local/bin/kubectl: exec format error (1.027949ms)
functional_test.go:812: failed to get components. args "kubectl --context functional-911502 get po -l tier=control-plane -n kube-system -o=json": fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/serial/ComponentHealth]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-911502
helpers_test.go:235: (dbg) docker inspect functional-911502:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1",
	        "Created": "2024-09-16T10:47:14.597354828Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2085675,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:47:14.7319597Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hostname",
	        "HostsPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hosts",
	        "LogPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1-json.log",
	        "Name": "/functional-911502",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-911502:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-911502",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/merged",
	                "UpperDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/diff",
	                "WorkDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "functional-911502",
	                "Source": "/var/lib/docker/volumes/functional-911502/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-911502",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-911502",
	                "name.minikube.sigs.k8s.io": "functional-911502",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "44bb4c6f2ec0f0eef04adb8f886d0e0de7d31ae50612de741bed0ee945b2b75e",
	            "SandboxKey": "/var/run/docker/netns/44bb4c6f2ec0",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40592"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40593"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40596"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40594"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40595"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-911502": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "8c4428adf23c812318456ac17bea5953b33d7961994dfc84c0ff82a45764b662",
	                    "EndpointID": "8b3cc6f2c9f87b61b7e755d7ecd320ed6313887dfb3deab9f4e0858aa1c9fe80",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-911502",
	                        "9bf795605895"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-911502 -n functional-911502
helpers_test.go:244: <<< TestFunctional/serial/ComponentHealth FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/serial/ComponentHealth]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 logs -n 25: (2.049972986s)
helpers_test.go:252: TestFunctional/serial/ComponentHealth logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| Command |                                   Args                                   |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| unpause | nospam-826306 --log_dir                                                  | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|         | /tmp/nospam-826306 unpause                                               |                   |         |         |                     |                     |
	| unpause | nospam-826306 --log_dir                                                  | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|         | /tmp/nospam-826306 unpause                                               |                   |         |         |                     |                     |
	| unpause | nospam-826306 --log_dir                                                  | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|         | /tmp/nospam-826306 unpause                                               |                   |         |         |                     |                     |
	| stop    | nospam-826306 --log_dir                                                  | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|         | /tmp/nospam-826306 stop                                                  |                   |         |         |                     |                     |
	| stop    | nospam-826306 --log_dir                                                  | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|         | /tmp/nospam-826306 stop                                                  |                   |         |         |                     |                     |
	| stop    | nospam-826306 --log_dir                                                  | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|         | /tmp/nospam-826306 stop                                                  |                   |         |         |                     |                     |
	| delete  | -p nospam-826306                                                         | nospam-826306     | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	| start   | -p functional-911502                                                     | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:47 UTC |
	|         | --memory=4000                                                            |                   |         |         |                     |                     |
	|         | --apiserver-port=8441                                                    |                   |         |         |                     |                     |
	|         | --wait=all --driver=docker                                               |                   |         |         |                     |                     |
	|         | --container-runtime=containerd                                           |                   |         |         |                     |                     |
	| start   | -p functional-911502                                                     | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:47 UTC | 16 Sep 24 10:48 UTC |
	|         | --alsologtostderr -v=8                                                   |                   |         |         |                     |                     |
	| cache   | functional-911502 cache add                                              | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:3.1                                                |                   |         |         |                     |                     |
	| cache   | functional-911502 cache add                                              | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:3.3                                                |                   |         |         |                     |                     |
	| cache   | functional-911502 cache add                                              | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| cache   | functional-911502 cache add                                              | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | minikube-local-cache-test:functional-911502                              |                   |         |         |                     |                     |
	| cache   | functional-911502 cache delete                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | minikube-local-cache-test:functional-911502                              |                   |         |         |                     |                     |
	| cache   | delete                                                                   | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:3.3                                                |                   |         |         |                     |                     |
	| cache   | list                                                                     | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	| ssh     | functional-911502 ssh sudo                                               | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | crictl images                                                            |                   |         |         |                     |                     |
	| ssh     | functional-911502                                                        | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | ssh sudo crictl rmi                                                      |                   |         |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| ssh     | functional-911502 ssh                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC |                     |
	|         | sudo crictl inspecti                                                     |                   |         |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| cache   | functional-911502 cache reload                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	| ssh     | functional-911502 ssh                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | sudo crictl inspecti                                                     |                   |         |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| cache   | delete                                                                   | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:3.1                                                |                   |         |         |                     |                     |
	| cache   | delete                                                                   | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| kubectl | functional-911502 kubectl --                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | --context functional-911502                                              |                   |         |         |                     |                     |
	|         | get pods                                                                 |                   |         |         |                     |                     |
	| start   | -p functional-911502                                                     | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision |                   |         |         |                     |                     |
	|         | --wait=all                                                               |                   |         |         |                     |                     |
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:48:18
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:48:18.056558 2090441 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:48:18.056731 2090441 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:48:18.056736 2090441 out.go:358] Setting ErrFile to fd 2...
	I0916 10:48:18.056744 2090441 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:48:18.057119 2090441 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:48:18.057904 2090441 out.go:352] Setting JSON to false
	I0916 10:48:18.059612 2090441 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":138640,"bootTime":1726345058,"procs":201,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:48:18.060116 2090441 start.go:139] virtualization:  
	I0916 10:48:18.063723 2090441 out.go:177] * [functional-911502] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:48:18.066427 2090441 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:48:18.066506 2090441 notify.go:220] Checking for updates...
	I0916 10:48:18.071616 2090441 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:48:18.074552 2090441 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:48:18.077059 2090441 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:48:18.079403 2090441 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:48:18.081886 2090441 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:48:18.085175 2090441 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:48:18.085267 2090441 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:48:18.118972 2090441 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:48:18.119082 2090441 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:48:18.187814 2090441 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:52 OomKillDisable:true NGoroutines:84 SystemTime:2024-09-16 10:48:18.177439051 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:48:18.187915 2090441 docker.go:318] overlay module found
	I0916 10:48:18.190819 2090441 out.go:177] * Using the docker driver based on existing profile
	I0916 10:48:18.193361 2090441 start.go:297] selected driver: docker
	I0916 10:48:18.193371 2090441 start.go:901] validating driver "docker" against &{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountU
ID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:18.193484 2090441 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:48:18.193591 2090441 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:48:18.252173 2090441 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:52 OomKillDisable:true NGoroutines:84 SystemTime:2024-09-16 10:48:18.241918971 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:48:18.252721 2090441 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:48:18.252750 2090441 cni.go:84] Creating CNI manager for ""
	I0916 10:48:18.252804 2090441 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:48:18.252849 2090441 start.go:340] cluster config:
	{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local Containe
rRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUI
D:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:18.255882 2090441 out.go:177] * Starting "functional-911502" primary control-plane node in "functional-911502" cluster
	I0916 10:48:18.258466 2090441 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:48:18.261084 2090441 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:48:18.263652 2090441 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:48:18.263697 2090441 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:48:18.263702 2090441 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:48:18.263711 2090441 cache.go:56] Caching tarball of preloaded images
	I0916 10:48:18.263789 2090441 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:48:18.263812 2090441 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:48:18.263919 2090441 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/config.json ...
	W0916 10:48:18.282640 2090441 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:48:18.282651 2090441 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:48:18.282779 2090441 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:48:18.282796 2090441 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:48:18.282799 2090441 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:48:18.282806 2090441 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:48:18.282811 2090441 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:48:18.403176 2090441 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:48:18.403224 2090441 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:48:18.403279 2090441 start.go:360] acquireMachinesLock for functional-911502: {Name:mk182321dd921c9bc14d73d2af41d001efc879fd Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:48:18.403399 2090441 start.go:364] duration metric: took 82.79µs to acquireMachinesLock for "functional-911502"
	I0916 10:48:18.403430 2090441 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:48:18.403435 2090441 fix.go:54] fixHost starting: 
	I0916 10:48:18.403813 2090441 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:18.421104 2090441 fix.go:112] recreateIfNeeded on functional-911502: state=Running err=<nil>
	W0916 10:48:18.421131 2090441 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:48:18.424219 2090441 out.go:177] * Updating the running docker "functional-911502" container ...
	I0916 10:48:18.426570 2090441 machine.go:93] provisionDockerMachine start ...
	I0916 10:48:18.426707 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:18.443892 2090441 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:18.444150 2090441 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:48:18.444157 2090441 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:48:18.582615 2090441 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-911502
	
	I0916 10:48:18.582628 2090441 ubuntu.go:169] provisioning hostname "functional-911502"
	I0916 10:48:18.582729 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:18.601133 2090441 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:18.601387 2090441 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:48:18.601402 2090441 main.go:141] libmachine: About to run SSH command:
	sudo hostname functional-911502 && echo "functional-911502" | sudo tee /etc/hostname
	I0916 10:48:18.750562 2090441 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-911502
	
	I0916 10:48:18.750635 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:18.768271 2090441 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:18.768521 2090441 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:48:18.768536 2090441 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sfunctional-911502' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 functional-911502/g' /etc/hosts;
				else 
					echo '127.0.1.1 functional-911502' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:48:18.907246 2090441 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:48:18.907263 2090441 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:48:18.907293 2090441 ubuntu.go:177] setting up certificates
	I0916 10:48:18.907302 2090441 provision.go:84] configureAuth start
	I0916 10:48:18.907364 2090441 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-911502
	I0916 10:48:18.927347 2090441 provision.go:143] copyHostCerts
	I0916 10:48:18.927406 2090441 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:48:18.927422 2090441 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:48:18.927486 2090441 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:48:18.927589 2090441 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:48:18.927593 2090441 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:48:18.927630 2090441 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:48:18.927703 2090441 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:48:18.927706 2090441 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:48:18.927733 2090441 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:48:18.927784 2090441 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.functional-911502 san=[127.0.0.1 192.168.49.2 functional-911502 localhost minikube]
	I0916 10:48:19.991257 2090441 provision.go:177] copyRemoteCerts
	I0916 10:48:19.991315 2090441 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:48:19.991358 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.029993 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.128591 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:48:20.156277 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1220 bytes)
	I0916 10:48:20.183185 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:48:20.209618 2090441 provision.go:87] duration metric: took 1.302302469s to configureAuth
	I0916 10:48:20.209635 2090441 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:48:20.209838 2090441 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:48:20.209844 2090441 machine.go:96] duration metric: took 1.783266636s to provisionDockerMachine
	I0916 10:48:20.209851 2090441 start.go:293] postStartSetup for "functional-911502" (driver="docker")
	I0916 10:48:20.209861 2090441 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:48:20.209924 2090441 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:48:20.209968 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.227087 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.323907 2090441 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:48:20.327142 2090441 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:48:20.327167 2090441 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:48:20.327179 2090441 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:48:20.327185 2090441 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:48:20.327194 2090441 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:48:20.327249 2090441 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:48:20.327327 2090441 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:48:20.327402 2090441 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/test/nested/copy/2063326/hosts -> hosts in /etc/test/nested/copy/2063326
	I0916 10:48:20.327447 2090441 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs /etc/test/nested/copy/2063326
	I0916 10:48:20.336043 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:48:20.360460 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/test/nested/copy/2063326/hosts --> /etc/test/nested/copy/2063326/hosts (40 bytes)
	I0916 10:48:20.385297 2090441 start.go:296] duration metric: took 175.431776ms for postStartSetup
	I0916 10:48:20.385378 2090441 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:48:20.385419 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.402295 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.495689 2090441 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:48:20.500214 2090441 fix.go:56] duration metric: took 2.096771088s for fixHost
	I0916 10:48:20.500228 2090441 start.go:83] releasing machines lock for "functional-911502", held for 2.096820746s
	I0916 10:48:20.500311 2090441 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-911502
	I0916 10:48:20.517203 2090441 ssh_runner.go:195] Run: cat /version.json
	I0916 10:48:20.517249 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.517492 2090441 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:48:20.517559 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.534860 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.538812 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.630195 2090441 ssh_runner.go:195] Run: systemctl --version
	I0916 10:48:20.761370 2090441 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:48:20.765788 2090441 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:48:20.785230 2090441 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:48:20.785301 2090441 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:48:20.794254 2090441 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:48:20.794269 2090441 start.go:495] detecting cgroup driver to use...
	I0916 10:48:20.794301 2090441 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:48:20.794353 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:48:20.807440 2090441 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:48:20.819052 2090441 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:48:20.819108 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:48:20.832763 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:48:20.845443 2090441 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:48:20.973966 2090441 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:48:21.096206 2090441 docker.go:233] disabling docker service ...
	I0916 10:48:21.096283 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:48:21.120231 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:48:21.134274 2090441 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:48:21.245027 2090441 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:48:21.353592 2090441 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:48:21.366138 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:48:21.385216 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:48:21.397039 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:48:21.408473 2090441 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:48:21.408530 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:48:21.419890 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:48:21.430748 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:48:21.441177 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:48:21.452981 2090441 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:48:21.463398 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:48:21.474455 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:48:21.485837 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:48:21.495989 2090441 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:48:21.504821 2090441 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:48:21.514007 2090441 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:21.630033 2090441 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:48:21.929885 2090441 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:48:21.929958 2090441 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:48:21.934261 2090441 start.go:563] Will wait 60s for crictl version
	I0916 10:48:21.934371 2090441 ssh_runner.go:195] Run: which crictl
	I0916 10:48:21.937870 2090441 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:48:21.977142 2090441 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:48:21.977214 2090441 ssh_runner.go:195] Run: containerd --version
	I0916 10:48:21.999841 2090441 ssh_runner.go:195] Run: containerd --version
	I0916 10:48:22.030027 2090441 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:48:22.032799 2090441 cli_runner.go:164] Run: docker network inspect functional-911502 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:48:22.049379 2090441 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:48:22.056006 2090441 out.go:177]   - apiserver.enable-admission-plugins=NamespaceAutoProvision
	I0916 10:48:22.058597 2090441 kubeadm.go:883] updating cluster {Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA API
ServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: Mou
ntMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:48:22.058756 2090441 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:48:22.058847 2090441 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:22.096492 2090441 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:48:22.096505 2090441 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:48:22.096567 2090441 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:22.140078 2090441 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:48:22.140090 2090441 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:48:22.140096 2090441 kubeadm.go:934] updating node { 192.168.49.2 8441 v1.31.1 containerd true true} ...
	I0916 10:48:22.140203 2090441 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=functional-911502 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:48:22.140274 2090441 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:48:22.179208 2090441 extraconfig.go:124] Overwriting default enable-admission-plugins=NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota with user provided enable-admission-plugins=NamespaceAutoProvision for component apiserver
	I0916 10:48:22.179227 2090441 cni.go:84] Creating CNI manager for ""
	I0916 10:48:22.179236 2090441 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:48:22.179244 2090441 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:48:22.179266 2090441 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8441 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:functional-911502 NodeName:functional-911502 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceAutoProvision] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfi
gOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:48:22.179387 2090441 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8441
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "functional-911502"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceAutoProvision"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8441
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:48:22.179477 2090441 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:48:22.188765 2090441 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:48:22.188832 2090441 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:48:22.197782 2090441 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (321 bytes)
	I0916 10:48:22.216639 2090441 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:48:22.234768 2090441 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2021 bytes)
	I0916 10:48:22.253522 2090441 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:48:22.257457 2090441 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:22.374849 2090441 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:22.388543 2090441 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502 for IP: 192.168.49.2
	I0916 10:48:22.388555 2090441 certs.go:194] generating shared ca certs ...
	I0916 10:48:22.388570 2090441 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:22.388723 2090441 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:48:22.388763 2090441 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:48:22.388769 2090441 certs.go:256] generating profile certs ...
	I0916 10:48:22.388849 2090441 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.key
	I0916 10:48:22.388891 2090441 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.key.03a9d60c
	I0916 10:48:22.388929 2090441 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.key
	I0916 10:48:22.389051 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:48:22.389077 2090441 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:48:22.389085 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:48:22.389109 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:48:22.389129 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:48:22.389149 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:48:22.389190 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:48:22.389803 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:48:22.417755 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:48:22.444790 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:48:22.469829 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:48:22.494601 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:48:22.519153 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:48:22.545011 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:48:22.569691 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:48:22.595014 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:48:22.619318 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:48:22.644259 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:48:22.668772 2090441 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:48:22.686564 2090441 ssh_runner.go:195] Run: openssl version
	I0916 10:48:22.692173 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:48:22.702850 2090441 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:22.706341 2090441 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:22.706407 2090441 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:22.713233 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:48:22.722032 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:48:22.731748 2090441 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:48:22.735358 2090441 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:48:22.735429 2090441 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:48:22.742406 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:48:22.751458 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:48:22.760959 2090441 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:48:22.764337 2090441 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:48:22.764391 2090441 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:48:22.771679 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:48:22.780664 2090441 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:48:22.784169 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:48:22.790852 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:48:22.797686 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:48:22.804722 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:48:22.811546 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:48:22.818077 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:48:22.824812 2090441 kubeadm.go:392] StartCluster: {Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APISer
verNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountM
Size:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:22.824893 2090441 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:48:22.824962 2090441 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:48:22.863779 2090441 cri.go:89] found id: "11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950"
	I0916 10:48:22.863790 2090441 cri.go:89] found id: "8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27"
	I0916 10:48:22.863793 2090441 cri.go:89] found id: "ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b"
	I0916 10:48:22.863796 2090441 cri.go:89] found id: "57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6"
	I0916 10:48:22.863798 2090441 cri.go:89] found id: "a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	I0916 10:48:22.863801 2090441 cri.go:89] found id: "492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31"
	I0916 10:48:22.863804 2090441 cri.go:89] found id: "31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44"
	I0916 10:48:22.863812 2090441 cri.go:89] found id: "928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a"
	I0916 10:48:22.863814 2090441 cri.go:89] found id: ""
	I0916 10:48:22.863867 2090441 ssh_runner.go:195] Run: sudo runc --root /run/containerd/runc/k8s.io list -f json
	I0916 10:48:22.896045 2090441 cri.go:116] JSON = [{"ociVersion":"1.0.2-dev","id":"11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","pid":2200,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950/rootfs","created":"2024-09-16T10:47:56.240222491Z","annotations":{"io.kubernetes.cri.container-name":"coredns","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/coredns/coredns:v1.11.3","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"31265291ac7da492c3c
fad84540ba2b684cdf0abad82be5c56d392df7613dc44","pid":1422,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44/rootfs","created":"2024-09-16T10:47:32.746424926Z","annotations":{"io.kubernetes.cri.container-name":"kube-scheduler","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-scheduler:v1.31.1","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","pid":1960,"status":"running","bundle":"/run/containe
rd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c/rootfs","created":"2024-09-16T10:47:45.184280835Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_storage-provisioner_ecac562d-8318-4226-b5f1-61f2c76bb51b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6
b99ce6c31","pid":1486,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31/rootfs","created":"2024-09-16T10:47:32.919386537Z","annotations":{"io.kubernetes.cri.container-name":"etcd","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/etcd:3.5.15-0","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","pid":1315,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e9713de05
1f556893ed4577eb8b5d9d38835da8d64517b7a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a/rootfs","created":"2024-09-16T10:47:32.575723929Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"256","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-apiserver-functional-911502_846e81f7bcac6804cf5ef499ea5ac265","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","pid":1316,"status":"running","bun
dle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f/rootfs","created":"2024-09-16T10:47:32.562785558Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"204","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-controller-manager-functional-911502_26c4a2e985a1c721e0411e5d9497a35b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id"
:"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","pid":1338,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7/rootfs","created":"2024-09-16T10:47:32.61203341Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-scheduler-functional-911502_69dae9ff35c780f43a15f539d6f19e46","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid"
:"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","pid":1792,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6/rootfs","created":"2024-09-16T10:47:44.701696235Z","annotations":{"io.kubernetes.cri.container-name":"kube-proxy","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-proxy:v1.31.1","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"8aede76947864ca07593bc24b939a29faf7
bb7dd85244f30f18f232f3ec1ea27","pid":2092,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27/rootfs","created":"2024-09-16T10:47:45.396938875Z","annotations":{"io.kubernetes.cri.container-name":"storage-provisioner","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"gcr.io/k8s-minikube/storage-provisioner:v5","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","pid":1399,"status":"running","bundle":"/run/containerd/io.containerd
.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a/rootfs","created":"2024-09-16T10:47:32.722647206Z","annotations":{"io.kubernetes.cri.container-name":"kube-controller-manager","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-controller-manager:v1.31.1","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","pid":2166,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3e
e6fc0070983ddfd","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd/rootfs","created":"2024-09-16T10:47:56.156755788Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_coredns-7c65d6cfc9-6kw9d_072167c7-fa1a-463e-a957-91ea24020387","io.kubernetes.cri.sandbox-memory":"178257920","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","pid":1479,"status":"running","bundle":"/run/containerd/io
.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19/rootfs","created":"2024-09-16T10:47:32.854949973Z","annotations":{"io.kubernetes.cri.container-name":"kube-apiserver","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-apiserver:v1.31.1","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","pid":1768,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e"
,"rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e/rootfs","created":"2024-09-16T10:47:44.62156803Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"10000","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kindnet-7r2rg_ab52a601-e0fe-4f60-a202-477487da9bb2","io.kubernetes.cri.sandbox-memory":"52428800","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","pid":1733,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/c
900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1/rootfs","created":"2024-09-16T10:47:44.525242585Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-proxy-l59dx_72a26843-9f97-4121-91f0-3cb389048315","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","pid":1835,"status":"running","bundl
e":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b/rootfs","created":"2024-09-16T10:47:44.827883907Z","annotations":{"io.kubernetes.cri.container-name":"kindnet-cni","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"docker.io/kindest/kindnetd:v20240813-c6f155d6","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","pid":1295,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedb
e08802f86e","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e/rootfs","created":"2024-09-16T10:47:32.534113117Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_etcd-functional-911502_c7ab8017ca620f2ba7e026f4cdb427a2","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"}]
	I0916 10:48:22.896348 2090441 cri.go:126] list returned 16 containers
	I0916 10:48:22.896356 2090441 cri.go:129] container: {ID:11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 Status:running}
	I0916 10:48:22.896377 2090441 cri.go:135] skipping {11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 running}: state = "running", want "paused"
	I0916 10:48:22.896384 2090441 cri.go:129] container: {ID:31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 Status:running}
	I0916 10:48:22.896389 2090441 cri.go:135] skipping {31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 running}: state = "running", want "paused"
	I0916 10:48:22.896394 2090441 cri.go:129] container: {ID:334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c Status:running}
	I0916 10:48:22.896399 2090441 cri.go:131] skipping 334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c - not in ps
	I0916 10:48:22.896404 2090441 cri.go:129] container: {ID:492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 Status:running}
	I0916 10:48:22.896409 2090441 cri.go:135] skipping {492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 running}: state = "running", want "paused"
	I0916 10:48:22.896414 2090441 cri.go:129] container: {ID:51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a Status:running}
	I0916 10:48:22.896418 2090441 cri.go:131] skipping 51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a - not in ps
	I0916 10:48:22.896421 2090441 cri.go:129] container: {ID:54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f Status:running}
	I0916 10:48:22.896424 2090441 cri.go:131] skipping 54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f - not in ps
	I0916 10:48:22.896427 2090441 cri.go:129] container: {ID:578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7 Status:running}
	I0916 10:48:22.896430 2090441 cri.go:131] skipping 578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7 - not in ps
	I0916 10:48:22.896433 2090441 cri.go:129] container: {ID:57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 Status:running}
	I0916 10:48:22.896438 2090441 cri.go:135] skipping {57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 running}: state = "running", want "paused"
	I0916 10:48:22.896442 2090441 cri.go:129] container: {ID:8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 Status:running}
	I0916 10:48:22.896447 2090441 cri.go:135] skipping {8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 running}: state = "running", want "paused"
	I0916 10:48:22.896451 2090441 cri.go:129] container: {ID:928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a Status:running}
	I0916 10:48:22.896455 2090441 cri.go:135] skipping {928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a running}: state = "running", want "paused"
	I0916 10:48:22.896459 2090441 cri.go:129] container: {ID:95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd Status:running}
	I0916 10:48:22.896464 2090441 cri.go:131] skipping 95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd - not in ps
	I0916 10:48:22.896466 2090441 cri.go:129] container: {ID:a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 Status:running}
	I0916 10:48:22.896470 2090441 cri.go:135] skipping {a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 running}: state = "running", want "paused"
	I0916 10:48:22.896474 2090441 cri.go:129] container: {ID:b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e Status:running}
	I0916 10:48:22.896478 2090441 cri.go:131] skipping b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e - not in ps
	I0916 10:48:22.896480 2090441 cri.go:129] container: {ID:c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1 Status:running}
	I0916 10:48:22.896484 2090441 cri.go:131] skipping c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1 - not in ps
	I0916 10:48:22.896491 2090441 cri.go:129] container: {ID:ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b Status:running}
	I0916 10:48:22.896497 2090441 cri.go:135] skipping {ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b running}: state = "running", want "paused"
	I0916 10:48:22.896502 2090441 cri.go:129] container: {ID:e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e Status:running}
	I0916 10:48:22.896506 2090441 cri.go:131] skipping e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e - not in ps
	I0916 10:48:22.896558 2090441 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:48:22.906082 2090441 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 10:48:22.906092 2090441 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 10:48:22.906148 2090441 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 10:48:22.914917 2090441 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:48:22.915488 2090441 kubeconfig.go:125] found "functional-911502" server: "https://192.168.49.2:8441"
	I0916 10:48:22.916782 2090441 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 10:48:22.925899 2090441 kubeadm.go:640] detected kubeadm config drift (will reconfigure cluster from new /var/tmp/minikube/kubeadm.yaml):
	-- stdout --
	--- /var/tmp/minikube/kubeadm.yaml	2024-09-16 10:47:21.242945037 +0000
	+++ /var/tmp/minikube/kubeadm.yaml.new	2024-09-16 10:48:22.250558320 +0000
	@@ -22,7 +22,7 @@
	 apiServer:
	   certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	   extraArgs:
	-    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	+    enable-admission-plugins: "NamespaceAutoProvision"
	 controllerManager:
	   extraArgs:
	     allocate-node-cidrs: "true"
	
	-- /stdout --
	I0916 10:48:22.925911 2090441 kubeadm.go:1160] stopping kube-system containers ...
	I0916 10:48:22.925922 2090441 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name: Namespaces:[kube-system]}
	I0916 10:48:22.925986 2090441 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:48:22.969464 2090441 cri.go:89] found id: "11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950"
	I0916 10:48:22.969476 2090441 cri.go:89] found id: "8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27"
	I0916 10:48:22.969480 2090441 cri.go:89] found id: "ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b"
	I0916 10:48:22.969488 2090441 cri.go:89] found id: "57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6"
	I0916 10:48:22.969491 2090441 cri.go:89] found id: "a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	I0916 10:48:22.969494 2090441 cri.go:89] found id: "492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31"
	I0916 10:48:22.969497 2090441 cri.go:89] found id: "31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44"
	I0916 10:48:22.969499 2090441 cri.go:89] found id: "928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a"
	I0916 10:48:22.969502 2090441 cri.go:89] found id: ""
	I0916 10:48:22.969506 2090441 cri.go:252] Stopping containers: [11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b 57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a]
	I0916 10:48:22.969568 2090441 ssh_runner.go:195] Run: which crictl
	I0916 10:48:22.973333 2090441 ssh_runner.go:195] Run: sudo /usr/bin/crictl stop --timeout=10 11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b 57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a
	I0916 10:48:38.633471 2090441 ssh_runner.go:235] Completed: sudo /usr/bin/crictl stop --timeout=10 11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b 57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a: (15.660102208s)
	I0916 10:48:38.633532 2090441 ssh_runner.go:195] Run: sudo systemctl stop kubelet
	I0916 10:48:38.739961 2090441 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:48:38.750010 2090441 kubeadm.go:157] found existing configuration files:
	-rw------- 1 root root 5651 Sep 16 10:47 /etc/kubernetes/admin.conf
	-rw------- 1 root root 5652 Sep 16 10:47 /etc/kubernetes/controller-manager.conf
	-rw------- 1 root root 2007 Sep 16 10:47 /etc/kubernetes/kubelet.conf
	-rw------- 1 root root 5600 Sep 16 10:47 /etc/kubernetes/scheduler.conf
	
	I0916 10:48:38.750102 2090441 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/admin.conf
	I0916 10:48:38.760985 2090441 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/kubelet.conf
	I0916 10:48:38.771244 2090441 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/controller-manager.conf
	I0916 10:48:38.781324 2090441 kubeadm.go:163] "https://control-plane.minikube.internal:8441" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/controller-manager.conf: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:48:38.781386 2090441 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:48:38.791843 2090441 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/scheduler.conf
	I0916 10:48:38.802301 2090441 kubeadm.go:163] "https://control-plane.minikube.internal:8441" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/scheduler.conf: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:48:38.802359 2090441 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:48:38.811917 2090441 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:48:38.825359 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase certs all --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:38.890532 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase kubeconfig all --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:40.317726 2090441 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase kubeconfig all --config /var/tmp/minikube/kubeadm.yaml": (1.427159279s)
	I0916 10:48:40.317743 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase kubelet-start --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:40.550624 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase control-plane all --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:40.692801 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase etcd local --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:40.850212 2090441 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:48:40.850281 2090441 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:48:40.873399 2090441 api_server.go:72] duration metric: took 23.195463ms to wait for apiserver process to appear ...
	I0916 10:48:40.873414 2090441 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:48:40.873442 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:42.076555 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	W0916 10:48:42.076579 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	I0916 10:48:42.076592 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:42.086814 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	W0916 10:48:42.086832 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	I0916 10:48:42.374183 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:42.383929 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:42.383952 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:42.874448 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:42.911477 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:42.911495 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:43.373607 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:43.373978 2090441 api_server.go:269] stopped: https://192.168.49.2:8441/healthz: Get "https://192.168.49.2:8441/healthz": dial tcp 192.168.49.2:8441: connect: connection refused
	I0916 10:48:43.873567 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:43.873950 2090441 api_server.go:269] stopped: https://192.168.49.2:8441/healthz: Get "https://192.168.49.2:8441/healthz": dial tcp 192.168.49.2:8441: connect: connection refused
	I0916 10:48:44.373541 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:46.967237 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	W0916 10:48:46.967252 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	I0916 10:48:46.967269 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:47.234554 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:47.234573 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:47.373856 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:47.381648 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:47.381668 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:47.874262 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:47.881898 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:47.881918 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:48.373987 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:48.381643 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 200:
	ok
	I0916 10:48:48.395482 2090441 api_server.go:141] control plane version: v1.31.1
	I0916 10:48:48.395506 2090441 api_server.go:131] duration metric: took 7.522086105s to wait for apiserver health ...
	I0916 10:48:48.395515 2090441 cni.go:84] Creating CNI manager for ""
	I0916 10:48:48.395522 2090441 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:48:48.398238 2090441 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:48:48.400734 2090441 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:48:48.404728 2090441 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:48:48.404739 2090441 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:48:48.426027 2090441 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 10:48:48.844610 2090441 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:48:48.855743 2090441 system_pods.go:59] 8 kube-system pods found
	I0916 10:48:48.855765 2090441 system_pods.go:61] "coredns-7c65d6cfc9-6kw9d" [072167c7-fa1a-463e-a957-91ea24020387] Running
	I0916 10:48:48.855775 2090441 system_pods.go:61] "etcd-functional-911502" [9cccef26-ac83-485f-a6ae-2017f0ff645b] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 10:48:48.855785 2090441 system_pods.go:61] "kindnet-7r2rg" [ab52a601-e0fe-4f60-a202-477487da9bb2] Running
	I0916 10:48:48.855802 2090441 system_pods.go:61] "kube-apiserver-functional-911502" [49c666a4-4c79-4cfd-9ed9-f8307aea2147] Running / Ready:ContainersNotReady (containers with unready status: [kube-apiserver]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-apiserver])
	I0916 10:48:48.855809 2090441 system_pods.go:61] "kube-controller-manager-functional-911502" [60f8d5ef-11df-400e-bce8-00ed7502b8c7] Running / Ready:ContainersNotReady (containers with unready status: [kube-controller-manager]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-controller-manager])
	I0916 10:48:48.855818 2090441 system_pods.go:61] "kube-proxy-l59dx" [72a26843-9f97-4121-91f0-3cb389048315] Running
	I0916 10:48:48.855824 2090441 system_pods.go:61] "kube-scheduler-functional-911502" [7da8ecbb-189d-4ed2-bcbe-69ef483b67e8] Running / Ready:ContainersNotReady (containers with unready status: [kube-scheduler]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-scheduler])
	I0916 10:48:48.855835 2090441 system_pods.go:61] "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
	I0916 10:48:48.855842 2090441 system_pods.go:74] duration metric: took 11.220081ms to wait for pod list to return data ...
	I0916 10:48:48.855851 2090441 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:48:48.865905 2090441 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:48:48.865924 2090441 node_conditions.go:123] node cpu capacity is 2
	I0916 10:48:48.865935 2090441 node_conditions.go:105] duration metric: took 10.079887ms to run NodePressure ...
	I0916 10:48:48.865953 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase addon all --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:49.117268 2090441 kubeadm.go:724] waiting for restarted kubelet to initialise ...
	I0916 10:48:49.129456 2090441 kubeadm.go:739] kubelet initialised
	I0916 10:48:49.129467 2090441 kubeadm.go:740] duration metric: took 12.186746ms waiting for restarted kubelet to initialise ...
	I0916 10:48:49.129475 2090441 pod_ready.go:36] extra waiting up to 4m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:49.137397 2090441 pod_ready.go:79] waiting up to 4m0s for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:49.144966 2090441 pod_ready.go:93] pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:49.144978 2090441 pod_ready.go:82] duration metric: took 7.566607ms for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:49.144988 2090441 pod_ready.go:79] waiting up to 4m0s for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:51.150942 2090441 pod_ready.go:103] pod "etcd-functional-911502" in "kube-system" namespace has status "Ready":"False"
	I0916 10:48:52.151654 2090441 pod_ready.go:93] pod "etcd-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:52.151667 2090441 pod_ready.go:82] duration metric: took 3.006671097s for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:52.151678 2090441 pod_ready.go:79] waiting up to 4m0s for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:54.158468 2090441 pod_ready.go:103] pod "kube-apiserver-functional-911502" in "kube-system" namespace has status "Ready":"False"
	I0916 10:48:56.158238 2090441 pod_ready.go:93] pod "kube-apiserver-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.158254 2090441 pod_ready.go:82] duration metric: took 4.006566025s for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.158263 2090441 pod_ready.go:79] waiting up to 4m0s for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.165526 2090441 pod_ready.go:93] pod "kube-controller-manager-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.165538 2090441 pod_ready.go:82] duration metric: took 7.267917ms for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.165547 2090441 pod_ready.go:79] waiting up to 4m0s for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.171832 2090441 pod_ready.go:93] pod "kube-proxy-l59dx" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.171843 2090441 pod_ready.go:82] duration metric: took 6.290339ms for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.171853 2090441 pod_ready.go:79] waiting up to 4m0s for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.177575 2090441 pod_ready.go:93] pod "kube-scheduler-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.177587 2090441 pod_ready.go:82] duration metric: took 5.727669ms for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.177598 2090441 pod_ready.go:39] duration metric: took 7.048114784s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:56.177613 2090441 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:48:56.185221 2090441 ops.go:34] apiserver oom_adj: -16
	I0916 10:48:56.185232 2090441 kubeadm.go:597] duration metric: took 33.279135817s to restartPrimaryControlPlane
	I0916 10:48:56.185240 2090441 kubeadm.go:394] duration metric: took 33.360438088s to StartCluster
	I0916 10:48:56.185255 2090441 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:56.185318 2090441 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:48:56.185924 2090441 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:56.186127 2090441 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:48:56.186461 2090441 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:48:56.186501 2090441 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:48:56.186566 2090441 addons.go:69] Setting storage-provisioner=true in profile "functional-911502"
	I0916 10:48:56.186578 2090441 addons.go:234] Setting addon storage-provisioner=true in "functional-911502"
	W0916 10:48:56.186583 2090441 addons.go:243] addon storage-provisioner should already be in state true
	I0916 10:48:56.186603 2090441 host.go:66] Checking if "functional-911502" exists ...
	I0916 10:48:56.186753 2090441 addons.go:69] Setting default-storageclass=true in profile "functional-911502"
	I0916 10:48:56.186769 2090441 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "functional-911502"
	I0916 10:48:56.187108 2090441 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:56.187112 2090441 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:56.191212 2090441 out.go:177] * Verifying Kubernetes components...
	I0916 10:48:56.194523 2090441 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:56.225453 2090441 addons.go:234] Setting addon default-storageclass=true in "functional-911502"
	W0916 10:48:56.225463 2090441 addons.go:243] addon default-storageclass should already be in state true
	I0916 10:48:56.225486 2090441 host.go:66] Checking if "functional-911502" exists ...
	I0916 10:48:56.225903 2090441 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:56.230244 2090441 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:48:56.235857 2090441 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:56.235869 2090441 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:48:56.235948 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:56.263017 2090441 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:56.263031 2090441 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:48:56.263097 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:56.282120 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:56.301735 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:56.353010 2090441 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:56.368505 2090441 node_ready.go:35] waiting up to 6m0s for node "functional-911502" to be "Ready" ...
	I0916 10:48:56.372081 2090441 node_ready.go:49] node "functional-911502" has status "Ready":"True"
	I0916 10:48:56.372091 2090441 node_ready.go:38] duration metric: took 3.567903ms for node "functional-911502" to be "Ready" ...
	I0916 10:48:56.372099 2090441 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:56.379540 2090441 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.407301 2090441 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:56.437317 2090441 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:56.555436 2090441 pod_ready.go:93] pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.555447 2090441 pod_ready.go:82] duration metric: took 175.892756ms for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.555456 2090441 pod_ready.go:79] waiting up to 6m0s for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.956056 2090441 pod_ready.go:93] pod "etcd-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.956068 2090441 pod_ready.go:82] duration metric: took 400.604402ms for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.956081 2090441 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:57.232685 2090441 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 10:48:57.235189 2090441 addons.go:510] duration metric: took 1.048680086s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0916 10:48:57.355420 2090441 pod_ready.go:93] pod "kube-apiserver-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:57.355430 2090441 pod_ready.go:82] duration metric: took 399.343184ms for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:57.355440 2090441 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:57.755583 2090441 pod_ready.go:93] pod "kube-controller-manager-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:57.755595 2090441 pod_ready.go:82] duration metric: took 400.148824ms for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:57.755605 2090441 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:58.155248 2090441 pod_ready.go:93] pod "kube-proxy-l59dx" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:58.155259 2090441 pod_ready.go:82] duration metric: took 399.64819ms for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:58.155269 2090441 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:58.555115 2090441 pod_ready.go:93] pod "kube-scheduler-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:58.555126 2090441 pod_ready.go:82] duration metric: took 399.851365ms for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:58.555137 2090441 pod_ready.go:39] duration metric: took 2.183026428s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:58.555151 2090441 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:48:58.555220 2090441 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:48:58.566810 2090441 api_server.go:72] duration metric: took 2.380658837s to wait for apiserver process to appear ...
	I0916 10:48:58.566825 2090441 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:48:58.566852 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:58.574536 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 200:
	ok
	I0916 10:48:58.575555 2090441 api_server.go:141] control plane version: v1.31.1
	I0916 10:48:58.575570 2090441 api_server.go:131] duration metric: took 8.738422ms to wait for apiserver health ...
	I0916 10:48:58.575586 2090441 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:48:58.759444 2090441 system_pods.go:59] 8 kube-system pods found
	I0916 10:48:58.759464 2090441 system_pods.go:61] "coredns-7c65d6cfc9-6kw9d" [072167c7-fa1a-463e-a957-91ea24020387] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 10:48:58.759469 2090441 system_pods.go:61] "etcd-functional-911502" [9cccef26-ac83-485f-a6ae-2017f0ff645b] Running
	I0916 10:48:58.759474 2090441 system_pods.go:61] "kindnet-7r2rg" [ab52a601-e0fe-4f60-a202-477487da9bb2] Running
	I0916 10:48:58.759478 2090441 system_pods.go:61] "kube-apiserver-functional-911502" [49c666a4-4c79-4cfd-9ed9-f8307aea2147] Running
	I0916 10:48:58.759482 2090441 system_pods.go:61] "kube-controller-manager-functional-911502" [60f8d5ef-11df-400e-bce8-00ed7502b8c7] Running
	I0916 10:48:58.759485 2090441 system_pods.go:61] "kube-proxy-l59dx" [72a26843-9f97-4121-91f0-3cb389048315] Running
	I0916 10:48:58.759488 2090441 system_pods.go:61] "kube-scheduler-functional-911502" [7da8ecbb-189d-4ed2-bcbe-69ef483b67e8] Running
	I0916 10:48:58.759493 2090441 system_pods.go:61] "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
	I0916 10:48:58.759499 2090441 system_pods.go:74] duration metric: took 183.907696ms to wait for pod list to return data ...
	I0916 10:48:58.759506 2090441 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:48:58.956246 2090441 default_sa.go:45] found service account: "default"
	I0916 10:48:58.956260 2090441 default_sa.go:55] duration metric: took 196.748952ms for default service account to be created ...
	I0916 10:48:58.956270 2090441 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:48:59.159500 2090441 system_pods.go:86] 8 kube-system pods found
	I0916 10:48:59.159531 2090441 system_pods.go:89] "coredns-7c65d6cfc9-6kw9d" [072167c7-fa1a-463e-a957-91ea24020387] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 10:48:59.159538 2090441 system_pods.go:89] "etcd-functional-911502" [9cccef26-ac83-485f-a6ae-2017f0ff645b] Running
	I0916 10:48:59.159543 2090441 system_pods.go:89] "kindnet-7r2rg" [ab52a601-e0fe-4f60-a202-477487da9bb2] Running
	I0916 10:48:59.159547 2090441 system_pods.go:89] "kube-apiserver-functional-911502" [49c666a4-4c79-4cfd-9ed9-f8307aea2147] Running
	I0916 10:48:59.159551 2090441 system_pods.go:89] "kube-controller-manager-functional-911502" [60f8d5ef-11df-400e-bce8-00ed7502b8c7] Running
	I0916 10:48:59.159559 2090441 system_pods.go:89] "kube-proxy-l59dx" [72a26843-9f97-4121-91f0-3cb389048315] Running
	I0916 10:48:59.159566 2090441 system_pods.go:89] "kube-scheduler-functional-911502" [7da8ecbb-189d-4ed2-bcbe-69ef483b67e8] Running
	I0916 10:48:59.159572 2090441 system_pods.go:89] "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
	I0916 10:48:59.159582 2090441 system_pods.go:126] duration metric: took 203.306375ms to wait for k8s-apps to be running ...
	I0916 10:48:59.159588 2090441 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:48:59.159665 2090441 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:48:59.172329 2090441 system_svc.go:56] duration metric: took 12.729233ms WaitForService to wait for kubelet
	I0916 10:48:59.172348 2090441 kubeadm.go:582] duration metric: took 2.986201415s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:48:59.172365 2090441 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:48:59.356377 2090441 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:48:59.356393 2090441 node_conditions.go:123] node cpu capacity is 2
	I0916 10:48:59.356402 2090441 node_conditions.go:105] duration metric: took 184.03252ms to run NodePressure ...
	I0916 10:48:59.356414 2090441 start.go:241] waiting for startup goroutines ...
	I0916 10:48:59.356420 2090441 start.go:246] waiting for cluster config update ...
	I0916 10:48:59.356430 2090441 start.go:255] writing updated cluster config ...
	I0916 10:48:59.356743 2090441 ssh_runner.go:195] Run: rm -f paused
	I0916 10:48:59.365595 2090441 out.go:177] * Done! kubectl is now configured to use "functional-911502" cluster and "default" namespace by default
	E0916 10:48:59.367860 2090441 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	0b22f9fb6da5e       ba04bb24b9575       13 seconds ago       Exited              storage-provisioner       2                   334ec243859df       storage-provisioner
	da9cb172fba10       d3f53a98c0a9d       16 seconds ago       Running             kube-apiserver            1                   6b31db950b5d2       kube-apiserver-functional-911502
	95188fff801b2       d3f53a98c0a9d       18 seconds ago       Exited              kube-apiserver            0                   6b31db950b5d2       kube-apiserver-functional-911502
	16a7dfc9e0119       7f8aa378bb47d       18 seconds ago       Running             kube-scheduler            1                   578f22ca4016c       kube-scheduler-functional-911502
	d954d9e91e01c       279f381cb3736       18 seconds ago       Running             kube-controller-manager   1                   54de1abbce22f       kube-controller-manager-functional-911502
	1a427a607f521       27e3830e14027       22 seconds ago       Running             etcd                      1                   e43a7a67672f1       etcd-functional-911502
	472eb48e2a576       6a23fa8fd2b78       32 seconds ago       Running             kindnet-cni               1                   b400f9b4bc923       kindnet-7r2rg
	1e1c55f6d316e       24a140c548c07       32 seconds ago       Running             kube-proxy                1                   c900cfd22280f       kube-proxy-l59dx
	381c4c4cdcc1a       2f6c962e7b831       32 seconds ago       Running             coredns                   1                   95b6f0353b091       coredns-7c65d6cfc9-6kw9d
	11757969f67eb       2f6c962e7b831       About a minute ago   Exited              coredns                   0                   95b6f0353b091       coredns-7c65d6cfc9-6kw9d
	ce5a28d1cb5d2       6a23fa8fd2b78       About a minute ago   Exited              kindnet-cni               0                   b400f9b4bc923       kindnet-7r2rg
	57c3cd94d0c59       24a140c548c07       About a minute ago   Exited              kube-proxy                0                   c900cfd22280f       kube-proxy-l59dx
	492408bc37d38       27e3830e14027       About a minute ago   Exited              etcd                      0                   e43a7a67672f1       etcd-functional-911502
	31265291ac7da       7f8aa378bb47d       About a minute ago   Exited              kube-scheduler            0                   578f22ca4016c       kube-scheduler-functional-911502
	928f2c64d0a66       279f381cb3736       About a minute ago   Exited              kube-controller-manager   0                   54de1abbce22f       kube-controller-manager-functional-911502
	
	
	==> containerd <==
	Sep 16 10:48:43 functional-911502 containerd[3778]: time="2024-09-16T10:48:43.170653184Z" level=info msg="StopContainer for \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\" returns successfully"
	Sep 16 10:48:43 functional-911502 containerd[3778]: time="2024-09-16T10:48:43.171549318Z" level=info msg="StopPodSandbox for \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\""
	Sep 16 10:48:43 functional-911502 containerd[3778]: time="2024-09-16T10:48:43.171714240Z" level=info msg="Container to stop \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\" must be in running or unknown state, current state \"CONTAINER_EXITED\""
	Sep 16 10:48:43 functional-911502 containerd[3778]: time="2024-09-16T10:48:43.171792607Z" level=info msg="Container to stop \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\" must be in running or unknown state, current state \"CONTAINER_EXITED\""
	Sep 16 10:48:43 functional-911502 containerd[3778]: time="2024-09-16T10:48:43.172051198Z" level=info msg="TearDown network for sandbox \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" successfully"
	Sep 16 10:48:43 functional-911502 containerd[3778]: time="2024-09-16T10:48:43.172143029Z" level=info msg="StopPodSandbox for \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" returns successfully"
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.038231814Z" level=info msg="RemoveContainer for \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\""
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.049226017Z" level=info msg="CreateContainer within sandbox \"6b31db950b5d280a5feb49909dd8de1ece9d6371214eb94fe9fc239781f367ab\" for container &ContainerMetadata{Name:kube-apiserver,Attempt:1,}"
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.052172641Z" level=info msg="RemoveContainer for \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\" returns successfully"
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.055100220Z" level=info msg="RemoveContainer for \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\""
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.063715608Z" level=info msg="RemoveContainer for \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\" returns successfully"
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.068510794Z" level=error msg="ContainerStatus for \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\" failed" error="rpc error: code = NotFound desc = an error occurred when try to find container \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\": not found"
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.070325855Z" level=error msg="ContainerStatus for \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\" failed" error="rpc error: code = NotFound desc = an error occurred when try to find container \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": not found"
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.079627605Z" level=info msg="CreateContainer within sandbox \"6b31db950b5d280a5feb49909dd8de1ece9d6371214eb94fe9fc239781f367ab\" for &ContainerMetadata{Name:kube-apiserver,Attempt:1,} returns container id \"da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12\""
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.080253645Z" level=info msg="StartContainer for \"da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12\""
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.223110276Z" level=info msg="StartContainer for \"da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12\" returns successfully"
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.551867636Z" level=info msg="CreateContainer within sandbox \"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c\" for container &ContainerMetadata{Name:storage-provisioner,Attempt:2,}"
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.568904337Z" level=info msg="CreateContainer within sandbox \"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c\" for &ContainerMetadata{Name:storage-provisioner,Attempt:2,} returns container id \"0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc\""
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.569729562Z" level=info msg="StartContainer for \"0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc\""
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.631010947Z" level=info msg="StartContainer for \"0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc\" returns successfully"
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.663906337Z" level=info msg="shim disconnected" id=0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc namespace=k8s.io
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.664118020Z" level=warning msg="cleaning up after shim disconnected" id=0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc namespace=k8s.io
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.664142586Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:48:48 functional-911502 containerd[3778]: time="2024-09-16T10:48:48.087297687Z" level=info msg="RemoveContainer for \"ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be\""
	Sep 16 10:48:48 functional-911502 containerd[3778]: time="2024-09-16T10:48:48.096408333Z" level=info msg="RemoveContainer for \"ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be\" returns successfully"
	
	
	==> coredns [11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39497 - 35637 "HINFO IN 756688810597303784.5152931065563193714. udp 56 false 512" NXDOMAIN qr,rd,ra 56 0.040061481s
	[INFO] SIGTERM: Shutting down servers then terminating
	[INFO] plugin/health: Going into lameduck mode for 5s
	
	
	==> coredns [381c4c4cdcc1a65a0e4d935f5449da5929d2fea3cf4e8c057860064146546ba0] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:57690 - 32762 "HINFO IN 7394603856605586965.6142061183963741332. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.012169588s
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: services is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "services" in API group "" at the cluster scope
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "services" in API group "" at the cluster scope
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: namespaces is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "namespaces" in API group "" at the cluster scope
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "namespaces" in API group "" at the cluster scope
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	
	
	==> describe nodes <==
	Name:               functional-911502
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-911502
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-911502
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_40_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:47:36 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-911502
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:48:52 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:36 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-911502
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 43a21049ac0d40628479cf884a8089e0
	  System UUID:                2830f6a5-4b63-46c5-b24a-468e4df19b79
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-6kw9d                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     77s
	  kube-system                 etcd-functional-911502                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         84s
	  kube-system                 kindnet-7r2rg                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      77s
	  kube-system                 kube-apiserver-functional-911502             250m (12%)    0 (0%)      0 (0%)           0 (0%)         14s
	  kube-system                 kube-controller-manager-functional-911502    200m (10%)    0 (0%)      0 (0%)           0 (0%)         82s
	  kube-system                 kube-proxy-l59dx                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         77s
	  kube-system                 kube-scheduler-functional-911502             100m (5%)     0 (0%)      0 (0%)           0 (0%)         82s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         77s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   Starting                 76s                kube-proxy       
	  Normal   Starting                 13s                kube-proxy       
	  Normal   NodeAllocatableEnforced  82s                kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 82s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  82s                kubelet          Node functional-911502 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    82s                kubelet          Node functional-911502 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     82s                kubelet          Node functional-911502 status is now: NodeHasSufficientPID
	  Normal   Starting                 82s                kubelet          Starting kubelet.
	  Normal   RegisteredNode           78s                node-controller  Node functional-911502 event: Registered Node functional-911502 in Controller
	  Normal   Starting                 21s                kubelet          Starting kubelet.
	  Warning  CgroupV1                 21s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  20s (x8 over 20s)  kubelet          Node functional-911502 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    20s (x7 over 20s)  kubelet          Node functional-911502 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     20s (x7 over 20s)  kubelet          Node functional-911502 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  20s                kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           11s                node-controller  Node functional-911502 event: Registered Node functional-911502 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [1a427a607f52143d7ababbbe77d9ccd5fb21bed4f47e6ea656a489787066bdd5] <==
	{"level":"info","ts":"2024-09-16T10:48:38.695375Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-09-16T10:48:38.695629Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.695854Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.695783Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:38.699522Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:48:38.699870Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:48:38.700575Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.701150Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.700996Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:48:40.180057Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180110Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180142Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180157Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180164Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180195Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180204Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.183853Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-911502 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:40.183916Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:40.184260Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:40.185178Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:40.186391Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:40.187443Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:40.188457Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:40.194734Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:40.194782Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	
	
	==> etcd [492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31] <==
	{"level":"info","ts":"2024-09-16T10:47:33.255286Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.255359Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.262769Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.270884Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-911502 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:47:33.271109Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.271402Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.272620Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.283805Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.271431Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284172Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284977Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.289591Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.306735Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306879Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306916Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.344113Z","caller":"osutil/interrupt_unix.go:64","msg":"received signal; shutting down","signal":"terminated"}
	{"level":"info","ts":"2024-09-16T10:48:38.344168Z","caller":"embed/etcd.go:377","msg":"closing etcd server","name":"functional-911502","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	{"level":"warn","ts":"2024-09-16T10:48:38.344265Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.344295Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.345876Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.345916Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"info","ts":"2024-09-16T10:48:38.345965Z","caller":"etcdserver/server.go:1521","msg":"skipped leadership transfer for single voting member cluster","local-member-id":"aec36adc501070cc","current-leader-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:48:38.347534Z","caller":"embed/etcd.go:581","msg":"stopping serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.347628Z","caller":"embed/etcd.go:586","msg":"stopped serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.347663Z","caller":"embed/etcd.go:379","msg":"closed etcd server","name":"functional-911502","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	
	
	==> kernel <==
	 10:49:01 up 1 day, 14:31,  0 users,  load average: 1.82, 1.30, 1.39
	Linux functional-911502 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [472eb48e2a57659caeaf99025beaec2f96e01b98d7d3d7676515ac24fb61fb58] <==
	W0916 10:48:30.515225       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	E0916 10:48:30.515274       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Pod: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	W0916 10:48:30.655352       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Node: Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	E0916 10:48:30.655402       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Node: failed to list *v1.Node: Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	W0916 10:48:31.919152       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.NetworkPolicy: Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	E0916 10:48:31.919195       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.NetworkPolicy: failed to list *v1.NetworkPolicy: Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	W0916 10:48:32.254615       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	E0916 10:48:32.254661       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	W0916 10:48:32.620984       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	E0916 10:48:32.621026       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Pod: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	W0916 10:48:33.587470       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Node: Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	E0916 10:48:33.587525       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Node: failed to list *v1.Node: Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	W0916 10:48:36.633382       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	E0916 10:48:36.633483       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Pod: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	W0916 10:48:37.401259       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	E0916 10:48:37.401307       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	W0916 10:48:37.743138       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.NetworkPolicy: Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	E0916 10:48:37.743179       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.NetworkPolicy: failed to list *v1.NetworkPolicy: Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	W0916 10:48:42.091074       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Node: nodes is forbidden: User "system:serviceaccount:kube-system:kindnet" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:48:42.091119       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User "system:serviceaccount:kube-system:kindnet" cannot list resource "nodes" in API group "" at the cluster scope
	I0916 10:48:50.320990       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:48:50.321150       1 metrics.go:61] Registering metrics
	I0916 10:48:50.321308       1 controller.go:374] Syncing nftables rules
	I0916 10:48:59.220364       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:59.220423       1 main.go:299] handling current node
	
	
	==> kindnet [ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b] <==
	I0916 10:47:45.041351       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:47:45.041663       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:47:45.041804       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:47:45.041819       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:47:45.041830       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:47:45.520963       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:47:45.521152       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:47:45.521205       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:47:45.722171       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:47:45.722199       1 metrics.go:61] Registering metrics
	I0916 10:47:45.722266       1 controller.go:374] Syncing nftables rules
	I0916 10:47:55.524822       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:47:55.524859       1 main.go:299] handling current node
	I0916 10:48:05.526968       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:05.527019       1 main.go:299] handling current node
	I0916 10:48:15.528186       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:15.528219       1 main.go:299] handling current node
	I0916 10:48:25.523558       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:25.523675       1 main.go:299] handling current node
	
	
	==> kube-apiserver [95188fff801b22ea7d5c57a472a58be0bc02010422f1867802de90863ce56801] <==
	I0916 10:48:42.839848       1 options.go:228] external host was not specified, using 192.168.49.2
	I0916 10:48:42.855248       1 server.go:142] Version: v1.31.1
	I0916 10:48:42.855626       1 server.go:144] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	E0916 10:48:42.856037       1 run.go:72] "command failed" err="failed to create listener: failed to listen on 0.0.0.0:8441: listen tcp 0.0.0.0:8441: bind: address already in use"
	
	
	==> kube-apiserver [da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12] <==
	I0916 10:48:47.150778       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:48:47.150982       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 10:48:47.151288       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 10:48:47.151407       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:48:47.151562       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:48:47.157426       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:48:47.157660       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:48:47.159024       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 10:48:47.159224       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:48:47.159303       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:48:47.159367       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:48:47.159432       1 cache.go:39] Caches are synced for autoregister controller
	I0916 10:48:47.189320       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:48:47.189540       1 policy_source.go:224] refreshing policies
	I0916 10:48:47.191266       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:48:47.230910       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:48:47.961268       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	W0916 10:48:48.300845       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2]
	I0916 10:48:48.302392       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:48:48.308164       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 10:48:48.837004       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:48:48.966544       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:48:48.985830       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:48:49.056843       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:48:49.065696       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	
	
	==> kube-controller-manager [928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a] <==
	I0916 10:47:43.260717       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:47:43.260741       1 shared_informer.go:320] Caches are synced for legacy-service-account-token-cleaner
	I0916 10:47:43.260789       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 10:47:43.264947       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:47:43.270925       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:43.274016       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:47:43.289621       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 10:47:43.309403       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:47:43.755815       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810781       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810815       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:47:43.822097       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:44.310052       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="384.204702ms"
	I0916 10:47:44.362819       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="52.71544ms"
	I0916 10:47:44.426732       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="63.859952ms"
	I0916 10:47:44.459758       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="32.950043ms"
	I0916 10:47:44.479101       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="19.296296ms"
	I0916 10:47:44.479182       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.386µs"
	I0916 10:47:46.277218       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="54.999µs"
	I0916 10:47:46.284221       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="47.499µs"
	I0916 10:47:46.289165       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.016µs"
	I0916 10:47:49.776695       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:56.302801       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="57.165µs"
	I0916 10:47:57.316220       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="18.588165ms"
	I0916 10:47:57.316368       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="102.169µs"
	
	
	==> kube-controller-manager [d954d9e91e01c0d0330d1084c35e9e29fd31db11cc4bc7b4efdc63e3c17afd45] <==
	I0916 10:48:50.420590       1 shared_informer.go:320] Caches are synced for cronjob
	I0916 10:48:50.420681       1 shared_informer.go:320] Caches are synced for disruption
	I0916 10:48:50.420809       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 10:48:50.420721       1 shared_informer.go:320] Caches are synced for validatingadmissionpolicy-status
	I0916 10:48:50.421043       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="99.405µs"
	I0916 10:48:50.423833       1 shared_informer.go:320] Caches are synced for TTL
	I0916 10:48:50.426496       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 10:48:50.431094       1 shared_informer.go:320] Caches are synced for TTL after finished
	I0916 10:48:50.433168       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:48:50.436096       1 shared_informer.go:320] Caches are synced for ReplicationController
	I0916 10:48:50.508138       1 shared_informer.go:320] Caches are synced for HPA
	I0916 10:48:50.570351       1 shared_informer.go:320] Caches are synced for expand
	I0916 10:48:50.594635       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:48:50.603270       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 10:48:50.620872       1 shared_informer.go:320] Caches are synced for ephemeral
	I0916 10:48:50.621193       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 10:48:50.621458       1 shared_informer.go:320] Caches are synced for PVC protection
	I0916 10:48:50.638042       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:50.660352       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:48:50.681442       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:51.072854       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:51.089681       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:51.090358       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:48:56.739270       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="40.969915ms"
	I0916 10:48:56.739344       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="41.895µs"
	
	
	==> kube-proxy [1e1c55f6d316ecfe21daf226fdfd7a0c46b596f2904f3881ac5d62f21f9aa385] <==
	I0916 10:48:28.645183       1 server_linux.go:66] "Using iptables proxy"
	E0916 10:48:28.721587       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:29.873863       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:32.256620       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:36.493031       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	I0916 10:48:47.226267       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:47.226420       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:47.297852       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:47.298129       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:47.300753       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:47.301480       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:47.301624       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:47.312238       1 config.go:199] "Starting service config controller"
	I0916 10:48:47.312282       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:47.312370       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:47.312419       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:47.317887       1 config.go:328] "Starting node config controller"
	I0916 10:48:47.317941       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:47.413200       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:47.413370       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:47.418734       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6] <==
	I0916 10:47:44.794046       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:47:44.895525       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:47:44.895614       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:47:44.916570       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:47:44.916637       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:47:44.918597       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:47:44.919425       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:47:44.919452       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:47:44.925419       1 config.go:199] "Starting service config controller"
	I0916 10:47:44.925472       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:47:44.925521       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:47:44.925531       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:47:44.928903       1 config.go:328] "Starting node config controller"
	I0916 10:47:44.928927       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:47:45.030179       1 shared_informer.go:320] Caches are synced for node config
	I0916 10:47:45.030253       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:47:45.030287       1 shared_informer.go:320] Caches are synced for endpoint slice config
	
	
	==> kube-scheduler [16a7dfc9e01198cab582eac129b2a3162c14e01f4ade815bbd0695fd67b02c4c] <==
	I0916 10:48:43.411981       1 serving.go:386] Generated self-signed cert in-memory
	W0916 10:48:47.054124       1 requestheader_controller.go:196] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0916 10:48:47.054270       1 authentication.go:370] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0916 10:48:47.054329       1 authentication.go:371] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0916 10:48:47.054367       1 authentication.go:372] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0916 10:48:47.137658       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:47.137797       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:47.143443       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:47.143739       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:47.146739       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:47.146832       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:47.247771       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kube-scheduler [31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44] <==
	E0916 10:47:37.419493       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419534       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:47:37.419548       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419590       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 10:47:37.419607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419645       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419660       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419704       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419719       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422428       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:47:37.422472       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422530       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:47:37.422547       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422779       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 10:47:37.422805       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	W0916 10:47:37.422862       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:47:37.422883       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424481       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.424516       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424588       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:47:37.424607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424692       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:47:37.424724       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	I0916 10:47:38.910602       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	E0916 10:48:38.401394       1 run.go:72] "command failed" err="finished without leader elect"
	
	
	==> kubelet <==
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.041321    4699 scope.go:117] "RemoveContainer" containerID="95188fff801b22ea7d5c57a472a58be0bc02010422f1867802de90863ce56801"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.041619    4699 status_manager.go:851] "Failed to get status for pod" podUID="c7ab8017ca620f2ba7e026f4cdb427a2" pod="kube-system/etcd-functional-911502" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/etcd-functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.041817    4699 status_manager.go:851] "Failed to get status for pod" podUID="0082f76f53cc9a35311f900de9a4ce8a" pod="kube-system/kube-apiserver-functional-911502" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.041988    4699 status_manager.go:851] "Failed to get status for pod" podUID="26c4a2e985a1c721e0411e5d9497a35b" pod="kube-system/kube-controller-manager-functional-911502" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-controller-manager-functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.042149    4699 status_manager.go:851] "Failed to get status for pod" podUID="69dae9ff35c780f43a15f539d6f19e46" pod="kube-system/kube-scheduler-functional-911502" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.042319    4699 status_manager.go:851] "Failed to get status for pod" podUID="72a26843-9f97-4121-91f0-3cb389048315" pod="kube-system/kube-proxy-l59dx" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-proxy-l59dx\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.052916    4699 scope.go:117] "RemoveContainer" containerID="a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.064122    4699 scope.go:117] "RemoveContainer" containerID="dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.069838    4699 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = an error occurred when try to find container \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\": not found" containerID="dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.069900    4699 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"containerd","ID":"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa"} err="failed to get container status \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\": rpc error: code = NotFound desc = an error occurred when try to find container \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\": not found"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.069984    4699 scope.go:117] "RemoveContainer" containerID="a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.070544    4699 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = an error occurred when try to find container \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": not found" containerID="a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.070591    4699 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"containerd","ID":"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"} err="failed to get container status \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": rpc error: code = NotFound desc = an error occurred when try to find container \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": not found"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: W0916 10:48:44.086224    4699 reflector.go:561] object-"kube-system"/"coredns": failed to list *v1.ConfigMap: Get "https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/configmaps?fieldSelector=metadata.name%3Dcoredns&resourceVersion=447": dial tcp 192.168.49.2:8441: connect: connection refused
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.086285    4699 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"coredns\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/configmaps?fieldSelector=metadata.name%3Dcoredns&resourceVersion=447\": dial tcp 192.168.49.2:8441: connect: connection refused" logger="UnhandledError"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: W0916 10:48:44.257240    4699 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://control-plane.minikube.internal:8441/apis/node.k8s.io/v1/runtimeclasses?resourceVersion=447": dial tcp 192.168.49.2:8441: connect: connection refused
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.257307    4699 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://control-plane.minikube.internal:8441/apis/node.k8s.io/v1/runtimeclasses?resourceVersion=447\": dial tcp 192.168.49.2:8441: connect: connection refused" logger="UnhandledError"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.911110    4699 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="846e81f7bcac6804cf5ef499ea5ac265" path="/var/lib/kubelet/pods/846e81f7bcac6804cf5ef499ea5ac265/volumes"
	Sep 16 10:48:45 functional-911502 kubelet[4699]: I0916 10:48:45.045335    4699 kubelet.go:1895] "Trying to delete pod" pod="kube-system/kube-apiserver-functional-911502" podUID="d399bd77-51dd-4ad3-90d4-6cf11e9e156e"
	Sep 16 10:48:47 functional-911502 kubelet[4699]: I0916 10:48:47.316886    4699 kubelet.go:1900] "Deleted mirror pod because it is outdated" pod="kube-system/kube-apiserver-functional-911502"
	Sep 16 10:48:47 functional-911502 kubelet[4699]: I0916 10:48:47.549623    4699 scope.go:117] "RemoveContainer" containerID="ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: I0916 10:48:48.083956    4699 scope.go:117] "RemoveContainer" containerID="ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: I0916 10:48:48.084957    4699 scope.go:117] "RemoveContainer" containerID="0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: E0916 10:48:48.085234    4699 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"storage-provisioner\" with CrashLoopBackOff: \"back-off 10s restarting failed container=storage-provisioner pod=storage-provisioner_kube-system(ecac562d-8318-4226-b5f1-61f2c76bb51b)\"" pod="kube-system/storage-provisioner" podUID="ecac562d-8318-4226-b5f1-61f2c76bb51b"
	Sep 16 10:48:51 functional-911502 kubelet[4699]: I0916 10:48:51.367187    4699 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-apiserver-functional-911502" podStartSLOduration=4.367170601 podStartE2EDuration="4.367170601s" podCreationTimestamp="2024-09-16 10:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:48:48.153684049 +0000 UTC m=+7.599562137" watchObservedRunningTime="2024-09-16 10:48:51.367170601 +0000 UTC m=+10.813048706"
	
	
	==> storage-provisioner [0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc] <==
	I0916 10:48:47.636953       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	F0916 10:48:47.638558       1 main.go:39] error getting server version: Get "https://10.96.0.1:443/version?timeout=32s": dial tcp 10.96.0.1:443: connect: connection refused
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-911502 -n functional-911502
helpers_test.go:261: (dbg) Run:  kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (479.283µs)
helpers_test.go:263: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/serial/ComponentHealth (2.87s)

                                                
                                    
x
+
TestFunctional/serial/InvalidService (0s)

                                                
                                                
=== RUN   TestFunctional/serial/InvalidService
functional_test.go:2321: (dbg) Run:  kubectl --context functional-911502 apply -f testdata/invalidsvc.yaml
functional_test.go:2321: (dbg) Non-zero exit: kubectl --context functional-911502 apply -f testdata/invalidsvc.yaml: fork/exec /usr/local/bin/kubectl: exec format error (453.7µs)
functional_test.go:2323: kubectl --context functional-911502 apply -f testdata/invalidsvc.yaml failed: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/serial/InvalidService (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/DashboardCmd (7.19s)

                                                
                                                
=== RUN   TestFunctional/parallel/DashboardCmd
=== PAUSE TestFunctional/parallel/DashboardCmd

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/DashboardCmd
functional_test.go:905: (dbg) daemon: [out/minikube-linux-arm64 dashboard --url --port 36195 -p functional-911502 --alsologtostderr -v=1]
functional_test.go:918: output didn't produce a URL
functional_test.go:910: (dbg) stopping [out/minikube-linux-arm64 dashboard --url --port 36195 -p functional-911502 --alsologtostderr -v=1] ...
functional_test.go:910: (dbg) [out/minikube-linux-arm64 dashboard --url --port 36195 -p functional-911502 --alsologtostderr -v=1] stdout:
functional_test.go:910: (dbg) [out/minikube-linux-arm64 dashboard --url --port 36195 -p functional-911502 --alsologtostderr -v=1] stderr:
I0916 10:50:59.772171 2096953 out.go:345] Setting OutFile to fd 1 ...
I0916 10:50:59.773353 2096953 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:50:59.773368 2096953 out.go:358] Setting ErrFile to fd 2...
I0916 10:50:59.773374 2096953 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:50:59.773664 2096953 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
I0916 10:50:59.773938 2096953 mustload.go:65] Loading cluster: functional-911502
I0916 10:50:59.774402 2096953 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
I0916 10:50:59.774937 2096953 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
I0916 10:50:59.792005 2096953 host.go:66] Checking if "functional-911502" exists ...
I0916 10:50:59.792327 2096953 cli_runner.go:164] Run: docker system info --format "{{json .}}"
I0916 10:50:59.856480 2096953 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:48 OomKillDisable:true NGoroutines:71 SystemTime:2024-09-16 10:50:59.845925943 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarc
h64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerError
s:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
I0916 10:50:59.856607 2096953 api_server.go:166] Checking apiserver status ...
I0916 10:50:59.856671 2096953 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I0916 10:50:59.856722 2096953 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
I0916 10:50:59.873060 2096953 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
I0916 10:50:59.977826 2096953 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/4988/cgroup
I0916 10:50:59.988438 2096953 api_server.go:182] apiserver freezer: "13:freezer:/docker/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/kubepods/burstable/pod0082f76f53cc9a35311f900de9a4ce8a/da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12"
I0916 10:50:59.988526 2096953 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/kubepods/burstable/pod0082f76f53cc9a35311f900de9a4ce8a/da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12/freezer.state
I0916 10:51:00.001294 2096953 api_server.go:204] freezer state: "THAWED"
I0916 10:51:00.001327 2096953 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
I0916 10:51:00.015568 2096953 api_server.go:279] https://192.168.49.2:8441/healthz returned 200:
ok
W0916 10:51:00.015612 2096953 out.go:270] * Enabling dashboard ...
* Enabling dashboard ...
I0916 10:51:00.015829 2096953 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
I0916 10:51:00.015847 2096953 addons.go:69] Setting dashboard=true in profile "functional-911502"
I0916 10:51:00.015862 2096953 addons.go:234] Setting addon dashboard=true in "functional-911502"
I0916 10:51:00.015890 2096953 host.go:66] Checking if "functional-911502" exists ...
I0916 10:51:00.016389 2096953 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
I0916 10:51:00.051963 2096953 out.go:177]   - Using image docker.io/kubernetesui/dashboard:v2.7.0
I0916 10:51:00.054865 2096953 out.go:177]   - Using image docker.io/kubernetesui/metrics-scraper:v1.0.8
I0916 10:51:00.062921 2096953 addons.go:431] installing /etc/kubernetes/addons/dashboard-ns.yaml
I0916 10:51:00.062978 2096953 ssh_runner.go:362] scp dashboard/dashboard-ns.yaml --> /etc/kubernetes/addons/dashboard-ns.yaml (759 bytes)
I0916 10:51:00.063063 2096953 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
I0916 10:51:00.114390 2096953 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
I0916 10:51:00.299609 2096953 addons.go:431] installing /etc/kubernetes/addons/dashboard-clusterrole.yaml
I0916 10:51:00.299664 2096953 ssh_runner.go:362] scp dashboard/dashboard-clusterrole.yaml --> /etc/kubernetes/addons/dashboard-clusterrole.yaml (1001 bytes)
I0916 10:51:00.358442 2096953 addons.go:431] installing /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml
I0916 10:51:00.358477 2096953 ssh_runner.go:362] scp dashboard/dashboard-clusterrolebinding.yaml --> /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml (1018 bytes)
I0916 10:51:00.406719 2096953 addons.go:431] installing /etc/kubernetes/addons/dashboard-configmap.yaml
I0916 10:51:00.406744 2096953 ssh_runner.go:362] scp dashboard/dashboard-configmap.yaml --> /etc/kubernetes/addons/dashboard-configmap.yaml (837 bytes)
I0916 10:51:00.446610 2096953 addons.go:431] installing /etc/kubernetes/addons/dashboard-dp.yaml
I0916 10:51:00.446632 2096953 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/dashboard-dp.yaml (4288 bytes)
I0916 10:51:00.473236 2096953 addons.go:431] installing /etc/kubernetes/addons/dashboard-role.yaml
I0916 10:51:00.473263 2096953 ssh_runner.go:362] scp dashboard/dashboard-role.yaml --> /etc/kubernetes/addons/dashboard-role.yaml (1724 bytes)
I0916 10:51:00.501760 2096953 addons.go:431] installing /etc/kubernetes/addons/dashboard-rolebinding.yaml
I0916 10:51:00.501785 2096953 ssh_runner.go:362] scp dashboard/dashboard-rolebinding.yaml --> /etc/kubernetes/addons/dashboard-rolebinding.yaml (1046 bytes)
I0916 10:51:00.527942 2096953 addons.go:431] installing /etc/kubernetes/addons/dashboard-sa.yaml
I0916 10:51:00.528015 2096953 ssh_runner.go:362] scp dashboard/dashboard-sa.yaml --> /etc/kubernetes/addons/dashboard-sa.yaml (837 bytes)
I0916 10:51:00.551724 2096953 addons.go:431] installing /etc/kubernetes/addons/dashboard-secret.yaml
I0916 10:51:00.551753 2096953 ssh_runner.go:362] scp dashboard/dashboard-secret.yaml --> /etc/kubernetes/addons/dashboard-secret.yaml (1389 bytes)
I0916 10:51:00.574006 2096953 addons.go:431] installing /etc/kubernetes/addons/dashboard-svc.yaml
I0916 10:51:00.574029 2096953 ssh_runner.go:362] scp dashboard/dashboard-svc.yaml --> /etc/kubernetes/addons/dashboard-svc.yaml (1294 bytes)
I0916 10:51:00.603793 2096953 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml
I0916 10:51:02.228429 2096953 ssh_runner.go:235] Completed: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/dashboard-ns.yaml -f /etc/kubernetes/addons/dashboard-clusterrole.yaml -f /etc/kubernetes/addons/dashboard-clusterrolebinding.yaml -f /etc/kubernetes/addons/dashboard-configmap.yaml -f /etc/kubernetes/addons/dashboard-dp.yaml -f /etc/kubernetes/addons/dashboard-role.yaml -f /etc/kubernetes/addons/dashboard-rolebinding.yaml -f /etc/kubernetes/addons/dashboard-sa.yaml -f /etc/kubernetes/addons/dashboard-secret.yaml -f /etc/kubernetes/addons/dashboard-svc.yaml: (1.62459041s)
I0916 10:51:02.231465 2096953 out.go:177] * Some dashboard features require the metrics-server addon. To enable all features please run:

                                                
                                                
	minikube -p functional-911502 addons enable metrics-server

                                                
                                                
I0916 10:51:02.233967 2096953 addons.go:197] Writing out "functional-911502" config to set dashboard=true...
W0916 10:51:02.234264 2096953 out.go:270] * Verifying dashboard health ...
* Verifying dashboard health ...
I0916 10:51:02.235170 2096953 kapi.go:59] client config for functional-911502: &rest.Config{Host:"https://192.168.49.2:8441", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil
), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
I0916 10:51:02.255071 2096953 service.go:214] Found service: &Service{ObjectMeta:{kubernetes-dashboard  kubernetes-dashboard  d296ecc2-858c-4694-8530-37d51a728756 690 0 2024-09-16 10:51:02 +0000 UTC <nil> <nil> map[addonmanager.kubernetes.io/mode:Reconcile k8s-app:kubernetes-dashboard kubernetes.io/minikube-addons:dashboard] map[kubectl.kubernetes.io/last-applied-configuration:{"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"addonmanager.kubernetes.io/mode":"Reconcile","k8s-app":"kubernetes-dashboard","kubernetes.io/minikube-addons":"dashboard"},"name":"kubernetes-dashboard","namespace":"kubernetes-dashboard"},"spec":{"ports":[{"port":80,"targetPort":9090}],"selector":{"k8s-app":"kubernetes-dashboard"}}}
] [] [] [{kubectl-client-side-apply Update v1 2024-09-16 10:51:02 +0000 UTC FieldsV1 {"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{}},"f:labels":{".":{},"f:addonmanager.kubernetes.io/mode":{},"f:k8s-app":{},"f:kubernetes.io/minikube-addons":{}}},"f:spec":{"f:internalTrafficPolicy":{},"f:ports":{".":{},"k:{\"port\":80,\"protocol\":\"TCP\"}":{".":{},"f:port":{},"f:protocol":{},"f:targetPort":{}}},"f:selector":{},"f:sessionAffinity":{},"f:type":{}}} }]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:,Protocol:TCP,Port:80,TargetPort:{0 9090 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: kubernetes-dashboard,},ClusterIP:10.111.52.145,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.111.52.145],IPFamilies:[IPv4],AllocateLoadBalance
rNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}
W0916 10:51:02.257589 2096953 out.go:270] * Launching proxy ...
* Launching proxy ...
I0916 10:51:02.257688 2096953 dashboard.go:152] Executing: /usr/local/bin/kubectl [/usr/local/bin/kubectl --context functional-911502 proxy --port 36195]
I0916 10:51:02.261289 2096953 out.go:201] 
W0916 10:51:02.263891 2096953 out.go:270] X Exiting due to HOST_KUBECTL_PROXY: kubectl proxy: proxy start: fork/exec /usr/local/bin/kubectl: exec format error
X Exiting due to HOST_KUBECTL_PROXY: kubectl proxy: proxy start: fork/exec /usr/local/bin/kubectl: exec format error
W0916 10:51:02.263946 2096953 out.go:270] * 
* 
W0916 10:51:02.628712 2096953 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
│                                                                                             │
│    * If the above advice does not help, please let us know:                                 │
│      https://github.com/kubernetes/minikube/issues/new/choose                               │
│                                                                                             │
│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
│    * Please also attach the following file to the GitHub issue:                             │
│    * - /tmp/minikube_mount_ddcbe6092c268ec94417e2287b79aaeddb405e1e_0.log                   │
│                                                                                             │
╰─────────────────────────────────────────────────────────────────────────────────────────────╯
╭─────────────────────────────────────────────────────────────────────────────────────────────╮
│                                                                                             │
│    * If the above advice does not help, please let us know:                                 │
│      https://github.com/kubernetes/minikube/issues/new/choose                               │
│                                                                                             │
│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
│    * Please also attach the following file to the GitHub issue:                             │
│    * - /tmp/minikube_mount_ddcbe6092c268ec94417e2287b79aaeddb405e1e_0.log                   │
│                                                                                             │
╰─────────────────────────────────────────────────────────────────────────────────────────────╯
I0916 10:51:02.631653 2096953 out.go:201] 
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/parallel/DashboardCmd]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-911502
helpers_test.go:235: (dbg) docker inspect functional-911502:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1",
	        "Created": "2024-09-16T10:47:14.597354828Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2085675,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:47:14.7319597Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hostname",
	        "HostsPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hosts",
	        "LogPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1-json.log",
	        "Name": "/functional-911502",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-911502:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-911502",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/merged",
	                "UpperDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/diff",
	                "WorkDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "functional-911502",
	                "Source": "/var/lib/docker/volumes/functional-911502/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-911502",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-911502",
	                "name.minikube.sigs.k8s.io": "functional-911502",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "44bb4c6f2ec0f0eef04adb8f886d0e0de7d31ae50612de741bed0ee945b2b75e",
	            "SandboxKey": "/var/run/docker/netns/44bb4c6f2ec0",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40592"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40593"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40596"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40594"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40595"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-911502": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "8c4428adf23c812318456ac17bea5953b33d7961994dfc84c0ff82a45764b662",
	                    "EndpointID": "8b3cc6f2c9f87b61b7e755d7ecd320ed6313887dfb3deab9f4e0858aa1c9fe80",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-911502",
	                        "9bf795605895"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-911502 -n functional-911502
helpers_test.go:244: <<< TestFunctional/parallel/DashboardCmd FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/parallel/DashboardCmd]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 logs -n 25: (3.039877844s)
helpers_test.go:252: TestFunctional/parallel/DashboardCmd logs: 
-- stdout --
	
	==> Audit <==
	|-----------|-------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	|  Command  |                                  Args                                   |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|-----------|-------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| service   | functional-911502 service                                               | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | --namespace=default --https                                             |                   |         |         |                     |                     |
	|           | --url hello-node                                                        |                   |         |         |                     |                     |
	| service   | functional-911502                                                       | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | service hello-node --url                                                |                   |         |         |                     |                     |
	|           | --format={{.IP}}                                                        |                   |         |         |                     |                     |
	| service   | functional-911502 service                                               | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | hello-node --url                                                        |                   |         |         |                     |                     |
	| mount     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdany-port726931808/001:/mount-9p      |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh findmnt                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | -T /mount-9p | grep 9p                                                  |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh findmnt                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	|           | -T /mount-9p | grep 9p                                                  |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh -- ls                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	|           | -la /mount-9p                                                           |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh cat                                               | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	|           | /mount-9p/test-1726483856463003754                                      |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh mount |                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | grep 9p; ls -la /mount-9p; cat                                          |                   |         |         |                     |                     |
	|           | /mount-9p/pod-dates                                                     |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh sudo                                              | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	|           | umount -f /mount-9p                                                     |                   |         |         |                     |                     |
	| start     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | --dry-run --memory                                                      |                   |         |         |                     |                     |
	|           | 250MB --alsologtostderr                                                 |                   |         |         |                     |                     |
	|           | --driver=docker                                                         |                   |         |         |                     |                     |
	|           | --container-runtime=containerd                                          |                   |         |         |                     |                     |
	| start     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | --dry-run --memory                                                      |                   |         |         |                     |                     |
	|           | 250MB --alsologtostderr                                                 |                   |         |         |                     |                     |
	|           | --driver=docker                                                         |                   |         |         |                     |                     |
	|           | --container-runtime=containerd                                          |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh findmnt                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | -T /mount-9p | grep 9p                                                  |                   |         |         |                     |                     |
	| mount     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdspecific-port819290008/001:/mount-9p |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1 --port 46464                                     |                   |         |         |                     |                     |
	| start     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | --dry-run --alsologtostderr                                             |                   |         |         |                     |                     |
	|           | -v=1 --driver=docker                                                    |                   |         |         |                     |                     |
	|           | --container-runtime=containerd                                          |                   |         |         |                     |                     |
	| dashboard | --url --port 36195                                                      | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | -p functional-911502                                                    |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh findmnt                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:51 UTC |
	|           | -T /mount-9p | grep 9p                                                  |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh -- ls                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | -la /mount-9p                                                           |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh sudo                                              | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | umount -f /mount-9p                                                     |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh findmnt                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | -T /mount1                                                              |                   |         |         |                     |                     |
	| mount     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdVerifyCleanup3293230860/001:/mount2  |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| mount     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdVerifyCleanup3293230860/001:/mount1  |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| mount     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdVerifyCleanup3293230860/001:/mount3  |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh findmnt                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | -T /mount1                                                              |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh findmnt                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | -T /mount2                                                              |                   |         |         |                     |                     |
	|-----------|-------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:50:59
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:50:59.466349 2096800 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:50:59.466944 2096800 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:50:59.466996 2096800 out.go:358] Setting ErrFile to fd 2...
	I0916 10:50:59.467017 2096800 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:50:59.467723 2096800 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:50:59.468236 2096800 out.go:352] Setting JSON to false
	I0916 10:50:59.469291 2096800 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":138802,"bootTime":1726345058,"procs":208,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:50:59.469449 2096800 start.go:139] virtualization:  
	I0916 10:50:59.473685 2096800 out.go:177] * [functional-911502] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:50:59.476767 2096800 notify.go:220] Checking for updates...
	I0916 10:50:59.484797 2096800 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:50:59.487415 2096800 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:50:59.489703 2096800 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:50:59.492387 2096800 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:50:59.495107 2096800 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:50:59.498996 2096800 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:50:59.502035 2096800 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:50:59.502801 2096800 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:50:59.537313 2096800 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:50:59.537431 2096800 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:50:59.631678 2096800 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:48 OomKillDisable:true NGoroutines:71 SystemTime:2024-09-16 10:50:59.621432888 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:50:59.631792 2096800 docker.go:318] overlay module found
	I0916 10:50:59.634922 2096800 out.go:177] * Using the docker driver based on existing profile
	I0916 10:50:59.637406 2096800 start.go:297] selected driver: docker
	I0916 10:50:59.637430 2096800 start.go:901] validating driver "docker" against &{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:do
cker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:50:59.637566 2096800 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:50:59.637694 2096800 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:50:59.716778 2096800 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:48 OomKillDisable:true NGoroutines:71 SystemTime:2024-09-16 10:50:59.707082204 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:50:59.717242 2096800 cni.go:84] Creating CNI manager for ""
	I0916 10:50:59.717297 2096800 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:50:59.717351 2096800 start.go:340] cluster config:
	{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local Containe
rRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUI
D:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:50:59.720176 2096800 out.go:177] * dry-run validation complete!
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	494883dd75dac       ba04bb24b9575       2 minutes ago       Running             storage-provisioner       3                   334ec243859df       storage-provisioner
	0b22f9fb6da5e       ba04bb24b9575       2 minutes ago       Exited              storage-provisioner       2                   334ec243859df       storage-provisioner
	da9cb172fba10       d3f53a98c0a9d       2 minutes ago       Running             kube-apiserver            1                   6b31db950b5d2       kube-apiserver-functional-911502
	95188fff801b2       d3f53a98c0a9d       2 minutes ago       Exited              kube-apiserver            0                   6b31db950b5d2       kube-apiserver-functional-911502
	16a7dfc9e0119       7f8aa378bb47d       2 minutes ago       Running             kube-scheduler            1                   578f22ca4016c       kube-scheduler-functional-911502
	d954d9e91e01c       279f381cb3736       2 minutes ago       Running             kube-controller-manager   1                   54de1abbce22f       kube-controller-manager-functional-911502
	1a427a607f521       27e3830e14027       2 minutes ago       Running             etcd                      1                   e43a7a67672f1       etcd-functional-911502
	472eb48e2a576       6a23fa8fd2b78       2 minutes ago       Running             kindnet-cni               1                   b400f9b4bc923       kindnet-7r2rg
	1e1c55f6d316e       24a140c548c07       2 minutes ago       Running             kube-proxy                1                   c900cfd22280f       kube-proxy-l59dx
	381c4c4cdcc1a       2f6c962e7b831       2 minutes ago       Running             coredns                   1                   95b6f0353b091       coredns-7c65d6cfc9-6kw9d
	11757969f67eb       2f6c962e7b831       3 minutes ago       Exited              coredns                   0                   95b6f0353b091       coredns-7c65d6cfc9-6kw9d
	ce5a28d1cb5d2       6a23fa8fd2b78       3 minutes ago       Exited              kindnet-cni               0                   b400f9b4bc923       kindnet-7r2rg
	57c3cd94d0c59       24a140c548c07       3 minutes ago       Exited              kube-proxy                0                   c900cfd22280f       kube-proxy-l59dx
	492408bc37d38       27e3830e14027       3 minutes ago       Exited              etcd                      0                   e43a7a67672f1       etcd-functional-911502
	31265291ac7da       7f8aa378bb47d       3 minutes ago       Exited              kube-scheduler            0                   578f22ca4016c       kube-scheduler-functional-911502
	928f2c64d0a66       279f381cb3736       3 minutes ago       Exited              kube-controller-manager   0                   54de1abbce22f       kube-controller-manager-functional-911502
	
	
	==> containerd <==
	Sep 16 10:49:01 functional-911502 containerd[3778]: time="2024-09-16T10:49:01.942508333Z" level=info msg="StartContainer for \"494883dd75dacc85b892b0e05225fb2278a65feec328dc17976c6f49346f27fb\""
	Sep 16 10:49:02 functional-911502 containerd[3778]: time="2024-09-16T10:49:02.043245100Z" level=info msg="StartContainer for \"494883dd75dacc85b892b0e05225fb2278a65feec328dc17976c6f49346f27fb\" returns successfully"
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.771343568Z" level=info msg="StopPodSandbox for \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\""
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.771455657Z" level=info msg="TearDown network for sandbox \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" successfully"
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.771471157Z" level=info msg="StopPodSandbox for \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" returns successfully"
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.772324288Z" level=info msg="RemovePodSandbox for \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\""
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.772366979Z" level=info msg="Forcibly stopping sandbox \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\""
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.772456357Z" level=info msg="TearDown network for sandbox \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" successfully"
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.777169287Z" level=warning msg="Failed to get podSandbox status for container event for sandboxID \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\": an error occurred when try to find sandbox: not found. Sending the event with nil podSandboxStatus."
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.777295785Z" level=info msg="RemovePodSandbox \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" returns successfully"
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.227679017Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:kubernetes-dashboard-695b96c756-mbl59,Uid:42bb249b-bfab-43de-8434-d26beb3b5dfd,Namespace:kubernetes-dashboard,Attempt:0,}"
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.278412803Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:dashboard-metrics-scraper-c5db448b4-z5ddp,Uid:982110d1-99e0-46cb-b2b3-45fc92464cf7,Namespace:kubernetes-dashboard,Attempt:0,}"
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.334400751Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.334531868Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.334564573Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.334772022Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.391055623Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.391453914Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.391614118Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.391995334Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.455234567Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:kubernetes-dashboard-695b96c756-mbl59,Uid:42bb249b-bfab-43de-8434-d26beb3b5dfd,Namespace:kubernetes-dashboard,Attempt:0,} returns sandbox id \"75ebd5d02de4da23e5dc320b62f8bd412b951e22817d0a1db054c8f711b8f2e5\""
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.457807942Z" level=info msg="PullImage \"docker.io/kubernetesui/dashboard:v2.7.0@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93\""
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.460221170Z" level=error msg="failed to decode hosts.toml" error="invalid `host` tree"
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.543971413Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:dashboard-metrics-scraper-c5db448b4-z5ddp,Uid:982110d1-99e0-46cb-b2b3-45fc92464cf7,Namespace:kubernetes-dashboard,Attempt:0,} returns sandbox id \"c9e5b834275d0d2bc12590706c9bfdd4f603af24b058ca764b86e251b4ab0527\""
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.651872207Z" level=error msg="failed to decode hosts.toml" error="invalid `host` tree"
	
	
	==> coredns [11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39497 - 35637 "HINFO IN 756688810597303784.5152931065563193714. udp 56 false 512" NXDOMAIN qr,rd,ra 56 0.040061481s
	[INFO] SIGTERM: Shutting down servers then terminating
	[INFO] plugin/health: Going into lameduck mode for 5s
	
	
	==> coredns [381c4c4cdcc1a65a0e4d935f5449da5929d2fea3cf4e8c057860064146546ba0] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:57690 - 32762 "HINFO IN 7394603856605586965.6142061183963741332. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.012169588s
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: services is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "services" in API group "" at the cluster scope
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "services" in API group "" at the cluster scope
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: namespaces is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "namespaces" in API group "" at the cluster scope
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "namespaces" in API group "" at the cluster scope
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	
	
	==> describe nodes <==
	Name:               functional-911502
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-911502
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-911502
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_40_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:47:36 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-911502
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:50:55 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:36 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-911502
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 43a21049ac0d40628479cf884a8089e0
	  System UUID:                2830f6a5-4b63-46c5-b24a-468e4df19b79
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (10 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-6kw9d                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     3m20s
	  kube-system                 etcd-functional-911502                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         3m27s
	  kube-system                 kindnet-7r2rg                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m20s
	  kube-system                 kube-apiserver-functional-911502             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m17s
	  kube-system                 kube-controller-manager-functional-911502    200m (10%)    0 (0%)      0 (0%)           0 (0%)         3m25s
	  kube-system                 kube-proxy-l59dx                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m20s
	  kube-system                 kube-scheduler-functional-911502             100m (5%)     0 (0%)      0 (0%)           0 (0%)         3m25s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m20s
	  kubernetes-dashboard        dashboard-metrics-scraper-c5db448b4-z5ddp    0 (0%)        0 (0%)      0 (0%)           0 (0%)         3s
	  kubernetes-dashboard        kubernetes-dashboard-695b96c756-mbl59        0 (0%)        0 (0%)      0 (0%)           0 (0%)         3s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 3m19s                  kube-proxy       
	  Normal   Starting                 2m17s                  kube-proxy       
	  Normal   NodeAllocatableEnforced  3m25s                  kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 3m25s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m25s                  kubelet          Node functional-911502 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m25s                  kubelet          Node functional-911502 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m25s                  kubelet          Node functional-911502 status is now: NodeHasSufficientPID
	  Normal   Starting                 3m25s                  kubelet          Starting kubelet.
	  Normal   RegisteredNode           3m21s                  node-controller  Node functional-911502 event: Registered Node functional-911502 in Controller
	  Normal   Starting                 2m24s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 2m24s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  2m23s (x8 over 2m23s)  kubelet          Node functional-911502 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    2m23s (x7 over 2m23s)  kubelet          Node functional-911502 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m23s (x7 over 2m23s)  kubelet          Node functional-911502 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  2m23s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           2m14s                  node-controller  Node functional-911502 event: Registered Node functional-911502 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [1a427a607f52143d7ababbbe77d9ccd5fb21bed4f47e6ea656a489787066bdd5] <==
	{"level":"info","ts":"2024-09-16T10:48:38.695375Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-09-16T10:48:38.695629Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.695854Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.695783Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:38.699522Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:48:38.699870Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:48:38.700575Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.701150Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.700996Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:48:40.180057Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180110Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180142Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180157Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180164Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180195Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180204Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.183853Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-911502 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:40.183916Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:40.184260Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:40.185178Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:40.186391Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:40.187443Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:40.188457Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:40.194734Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:40.194782Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	
	
	==> etcd [492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31] <==
	{"level":"info","ts":"2024-09-16T10:47:33.255286Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.255359Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.262769Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.270884Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-911502 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:47:33.271109Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.271402Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.272620Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.283805Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.271431Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284172Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284977Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.289591Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.306735Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306879Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306916Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.344113Z","caller":"osutil/interrupt_unix.go:64","msg":"received signal; shutting down","signal":"terminated"}
	{"level":"info","ts":"2024-09-16T10:48:38.344168Z","caller":"embed/etcd.go:377","msg":"closing etcd server","name":"functional-911502","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	{"level":"warn","ts":"2024-09-16T10:48:38.344265Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.344295Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.345876Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.345916Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"info","ts":"2024-09-16T10:48:38.345965Z","caller":"etcdserver/server.go:1521","msg":"skipped leadership transfer for single voting member cluster","local-member-id":"aec36adc501070cc","current-leader-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:48:38.347534Z","caller":"embed/etcd.go:581","msg":"stopping serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.347628Z","caller":"embed/etcd.go:586","msg":"stopped serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.347663Z","caller":"embed/etcd.go:379","msg":"closed etcd server","name":"functional-911502","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	
	
	==> kernel <==
	 10:51:05 up 1 day, 14:33,  0 users,  load average: 1.22, 1.11, 1.30
	Linux functional-911502 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [472eb48e2a57659caeaf99025beaec2f96e01b98d7d3d7676515ac24fb61fb58] <==
	I0916 10:48:59.220423       1 main.go:299] handling current node
	I0916 10:49:09.220620       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:09.220654       1 main.go:299] handling current node
	I0916 10:49:19.222817       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:19.222857       1 main.go:299] handling current node
	I0916 10:49:29.219698       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:29.219737       1 main.go:299] handling current node
	I0916 10:49:39.219237       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:39.219276       1 main.go:299] handling current node
	I0916 10:49:49.219131       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:49.219171       1 main.go:299] handling current node
	I0916 10:49:59.219970       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:59.220080       1 main.go:299] handling current node
	I0916 10:50:09.224203       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:09.224418       1 main.go:299] handling current node
	I0916 10:50:19.219828       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:19.219867       1 main.go:299] handling current node
	I0916 10:50:29.219398       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:29.219439       1 main.go:299] handling current node
	I0916 10:50:39.228234       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:39.228270       1 main.go:299] handling current node
	I0916 10:50:49.219663       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:49.219706       1 main.go:299] handling current node
	I0916 10:50:59.224567       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:59.224606       1 main.go:299] handling current node
	
	
	==> kindnet [ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b] <==
	I0916 10:47:45.041351       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:47:45.041663       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:47:45.041804       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:47:45.041819       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:47:45.041830       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:47:45.520963       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:47:45.521152       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:47:45.521205       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:47:45.722171       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:47:45.722199       1 metrics.go:61] Registering metrics
	I0916 10:47:45.722266       1 controller.go:374] Syncing nftables rules
	I0916 10:47:55.524822       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:47:55.524859       1 main.go:299] handling current node
	I0916 10:48:05.526968       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:05.527019       1 main.go:299] handling current node
	I0916 10:48:15.528186       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:15.528219       1 main.go:299] handling current node
	I0916 10:48:25.523558       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:25.523675       1 main.go:299] handling current node
	
	
	==> kube-apiserver [95188fff801b22ea7d5c57a472a58be0bc02010422f1867802de90863ce56801] <==
	I0916 10:48:42.839848       1 options.go:228] external host was not specified, using 192.168.49.2
	I0916 10:48:42.855248       1 server.go:142] Version: v1.31.1
	I0916 10:48:42.855626       1 server.go:144] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	E0916 10:48:42.856037       1 run.go:72] "command failed" err="failed to create listener: failed to listen on 0.0.0.0:8441: listen tcp 0.0.0.0:8441: bind: address already in use"
	
	
	==> kube-apiserver [da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12] <==
	I0916 10:48:47.151562       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:48:47.157426       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:48:47.157660       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:48:47.159024       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 10:48:47.159224       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:48:47.159303       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:48:47.159367       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:48:47.159432       1 cache.go:39] Caches are synced for autoregister controller
	I0916 10:48:47.189320       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:48:47.189540       1 policy_source.go:224] refreshing policies
	I0916 10:48:47.191266       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:48:47.230910       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:48:47.961268       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	W0916 10:48:48.300845       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2]
	I0916 10:48:48.302392       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:48:48.308164       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 10:48:48.837004       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:48:48.966544       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:48:48.985830       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:48:49.056843       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:48:49.065696       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 10:51:01.396849       1 controller.go:615] quota admission added evaluator for: namespaces
	I0916 10:51:01.558533       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0916 10:51:02.180634       1 alloc.go:330] "allocated clusterIPs" service="kubernetes-dashboard/kubernetes-dashboard" clusterIPs={"IPv4":"10.111.52.145"}
	I0916 10:51:02.217515       1 alloc.go:330] "allocated clusterIPs" service="kubernetes-dashboard/dashboard-metrics-scraper" clusterIPs={"IPv4":"10.107.16.51"}
	
	
	==> kube-controller-manager [928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a] <==
	I0916 10:47:43.260717       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:47:43.260741       1 shared_informer.go:320] Caches are synced for legacy-service-account-token-cleaner
	I0916 10:47:43.260789       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 10:47:43.264947       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:47:43.270925       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:43.274016       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:47:43.289621       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 10:47:43.309403       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:47:43.755815       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810781       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810815       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:47:43.822097       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:44.310052       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="384.204702ms"
	I0916 10:47:44.362819       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="52.71544ms"
	I0916 10:47:44.426732       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="63.859952ms"
	I0916 10:47:44.459758       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="32.950043ms"
	I0916 10:47:44.479101       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="19.296296ms"
	I0916 10:47:44.479182       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.386µs"
	I0916 10:47:46.277218       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="54.999µs"
	I0916 10:47:46.284221       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="47.499µs"
	I0916 10:47:46.289165       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.016µs"
	I0916 10:47:49.776695       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:56.302801       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="57.165µs"
	I0916 10:47:57.316220       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="18.588165ms"
	I0916 10:47:57.316368       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="102.169µs"
	
	
	==> kube-controller-manager [d954d9e91e01c0d0330d1084c35e9e29fd31db11cc4bc7b4efdc63e3c17afd45] <==
	I0916 10:49:06.732627       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="25.090829ms"
	I0916 10:49:06.733069       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="63.655µs"
	I0916 10:51:01.620793       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="56.746261ms"
	E0916 10:51:01.621009       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.653757       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="31.565578ms"
	E0916 10:51:01.654052       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.752122       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="96.817172ms"
	E0916 10:51:01.752351       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.779052       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="131.931306ms"
	E0916 10:51:01.779266       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/kubernetes-dashboard-695b96c756\" failed with pods \"kubernetes-dashboard-695b96c756-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.823989       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="69.206055ms"
	E0916 10:51:01.824197       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.835228       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="51.706588ms"
	E0916 10:51:01.835537       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/kubernetes-dashboard-695b96c756\" failed with pods \"kubernetes-dashboard-695b96c756-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.877168       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="50.302551ms"
	E0916 10:51:01.877428       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.877726       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="38.681906ms"
	E0916 10:51:01.877840       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/kubernetes-dashboard-695b96c756\" failed with pods \"kubernetes-dashboard-695b96c756-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:02.013803       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="133.031188ms"
	I0916 10:51:02.075833       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="194.640532ms"
	I0916 10:51:02.097439       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="83.571405ms"
	I0916 10:51:02.097659       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="164.167µs"
	I0916 10:51:02.120765       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="44.881154ms"
	I0916 10:51:02.121073       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="41.14µs"
	I0916 10:51:02.137019       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="43.495µs"
	
	
	==> kube-proxy [1e1c55f6d316ecfe21daf226fdfd7a0c46b596f2904f3881ac5d62f21f9aa385] <==
	I0916 10:48:28.645183       1 server_linux.go:66] "Using iptables proxy"
	E0916 10:48:28.721587       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:29.873863       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:32.256620       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:36.493031       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	I0916 10:48:47.226267       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:47.226420       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:47.297852       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:47.298129       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:47.300753       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:47.301480       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:47.301624       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:47.312238       1 config.go:199] "Starting service config controller"
	I0916 10:48:47.312282       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:47.312370       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:47.312419       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:47.317887       1 config.go:328] "Starting node config controller"
	I0916 10:48:47.317941       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:47.413200       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:47.413370       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:47.418734       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6] <==
	I0916 10:47:44.794046       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:47:44.895525       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:47:44.895614       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:47:44.916570       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:47:44.916637       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:47:44.918597       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:47:44.919425       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:47:44.919452       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:47:44.925419       1 config.go:199] "Starting service config controller"
	I0916 10:47:44.925472       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:47:44.925521       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:47:44.925531       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:47:44.928903       1 config.go:328] "Starting node config controller"
	I0916 10:47:44.928927       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:47:45.030179       1 shared_informer.go:320] Caches are synced for node config
	I0916 10:47:45.030253       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:47:45.030287       1 shared_informer.go:320] Caches are synced for endpoint slice config
	
	
	==> kube-scheduler [16a7dfc9e01198cab582eac129b2a3162c14e01f4ade815bbd0695fd67b02c4c] <==
	I0916 10:48:43.411981       1 serving.go:386] Generated self-signed cert in-memory
	W0916 10:48:47.054124       1 requestheader_controller.go:196] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0916 10:48:47.054270       1 authentication.go:370] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0916 10:48:47.054329       1 authentication.go:371] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0916 10:48:47.054367       1 authentication.go:372] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0916 10:48:47.137658       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:47.137797       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:47.143443       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:47.143739       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:47.146739       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:47.146832       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:47.247771       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kube-scheduler [31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44] <==
	E0916 10:47:37.419493       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419534       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:47:37.419548       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419590       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 10:47:37.419607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419645       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419660       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419704       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419719       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422428       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:47:37.422472       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422530       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:47:37.422547       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422779       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 10:47:37.422805       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	W0916 10:47:37.422862       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:47:37.422883       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424481       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.424516       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424588       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:47:37.424607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424692       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:47:37.424724       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	I0916 10:47:38.910602       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	E0916 10:48:38.401394       1 run.go:72] "command failed" err="finished without leader elect"
	
	
	==> kubelet <==
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.070591    4699 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"containerd","ID":"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"} err="failed to get container status \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": rpc error: code = NotFound desc = an error occurred when try to find container \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": not found"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: W0916 10:48:44.086224    4699 reflector.go:561] object-"kube-system"/"coredns": failed to list *v1.ConfigMap: Get "https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/configmaps?fieldSelector=metadata.name%3Dcoredns&resourceVersion=447": dial tcp 192.168.49.2:8441: connect: connection refused
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.086285    4699 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"coredns\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/configmaps?fieldSelector=metadata.name%3Dcoredns&resourceVersion=447\": dial tcp 192.168.49.2:8441: connect: connection refused" logger="UnhandledError"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: W0916 10:48:44.257240    4699 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://control-plane.minikube.internal:8441/apis/node.k8s.io/v1/runtimeclasses?resourceVersion=447": dial tcp 192.168.49.2:8441: connect: connection refused
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.257307    4699 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://control-plane.minikube.internal:8441/apis/node.k8s.io/v1/runtimeclasses?resourceVersion=447\": dial tcp 192.168.49.2:8441: connect: connection refused" logger="UnhandledError"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.911110    4699 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="846e81f7bcac6804cf5ef499ea5ac265" path="/var/lib/kubelet/pods/846e81f7bcac6804cf5ef499ea5ac265/volumes"
	Sep 16 10:48:45 functional-911502 kubelet[4699]: I0916 10:48:45.045335    4699 kubelet.go:1895] "Trying to delete pod" pod="kube-system/kube-apiserver-functional-911502" podUID="d399bd77-51dd-4ad3-90d4-6cf11e9e156e"
	Sep 16 10:48:47 functional-911502 kubelet[4699]: I0916 10:48:47.316886    4699 kubelet.go:1900] "Deleted mirror pod because it is outdated" pod="kube-system/kube-apiserver-functional-911502"
	Sep 16 10:48:47 functional-911502 kubelet[4699]: I0916 10:48:47.549623    4699 scope.go:117] "RemoveContainer" containerID="ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: I0916 10:48:48.083956    4699 scope.go:117] "RemoveContainer" containerID="ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: I0916 10:48:48.084957    4699 scope.go:117] "RemoveContainer" containerID="0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: E0916 10:48:48.085234    4699 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"storage-provisioner\" with CrashLoopBackOff: \"back-off 10s restarting failed container=storage-provisioner pod=storage-provisioner_kube-system(ecac562d-8318-4226-b5f1-61f2c76bb51b)\"" pod="kube-system/storage-provisioner" podUID="ecac562d-8318-4226-b5f1-61f2c76bb51b"
	Sep 16 10:48:51 functional-911502 kubelet[4699]: I0916 10:48:51.367187    4699 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-apiserver-functional-911502" podStartSLOduration=4.367170601 podStartE2EDuration="4.367170601s" podCreationTimestamp="2024-09-16 10:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:48:48.153684049 +0000 UTC m=+7.599562137" watchObservedRunningTime="2024-09-16 10:48:51.367170601 +0000 UTC m=+10.813048706"
	Sep 16 10:49:01 functional-911502 kubelet[4699]: I0916 10:49:01.908487    4699 scope.go:117] "RemoveContainer" containerID="0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: E0916 10:51:02.003636    4699 cpu_manager.go:395] "RemoveStaleState: removing container" podUID="846e81f7bcac6804cf5ef499ea5ac265" containerName="kube-apiserver"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: I0916 10:51:02.003722    4699 memory_manager.go:354] "RemoveStaleState removing state" podUID="846e81f7bcac6804cf5ef499ea5ac265" containerName="kube-apiserver"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: I0916 10:51:02.003733    4699 memory_manager.go:354] "RemoveStaleState removing state" podUID="846e81f7bcac6804cf5ef499ea5ac265" containerName="kube-apiserver"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: W0916 10:51:02.021783    4699 reflector.go:561] object-"kubernetes-dashboard"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:functional-911502" cannot list resource "configmaps" in API group "" in the namespace "kubernetes-dashboard": no relationship found between node 'functional-911502' and this object
	Sep 16 10:51:02 functional-911502 kubelet[4699]: E0916 10:51:02.022004    4699 reflector.go:158] "Unhandled Error" err="object-\"kubernetes-dashboard\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:functional-911502\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kubernetes-dashboard\": no relationship found between node 'functional-911502' and this object" logger="UnhandledError"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: E0916 10:51:02.072359    4699 cpu_manager.go:395] "RemoveStaleState: removing container" podUID="846e81f7bcac6804cf5ef499ea5ac265" containerName="kube-apiserver"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: I0916 10:51:02.122170    4699 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxq8p\" (UniqueName: \"kubernetes.io/projected/42bb249b-bfab-43de-8434-d26beb3b5dfd-kube-api-access-bxq8p\") pod \"kubernetes-dashboard-695b96c756-mbl59\" (UID: \"42bb249b-bfab-43de-8434-d26beb3b5dfd\") " pod="kubernetes-dashboard/kubernetes-dashboard-695b96c756-mbl59"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: I0916 10:51:02.122219    4699 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/42bb249b-bfab-43de-8434-d26beb3b5dfd-tmp-volume\") pod \"kubernetes-dashboard-695b96c756-mbl59\" (UID: \"42bb249b-bfab-43de-8434-d26beb3b5dfd\") " pod="kubernetes-dashboard/kubernetes-dashboard-695b96c756-mbl59"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: I0916 10:51:02.222910    4699 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2mdj\" (UniqueName: \"kubernetes.io/projected/982110d1-99e0-46cb-b2b3-45fc92464cf7-kube-api-access-q2mdj\") pod \"dashboard-metrics-scraper-c5db448b4-z5ddp\" (UID: \"982110d1-99e0-46cb-b2b3-45fc92464cf7\") " pod="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4-z5ddp"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: I0916 10:51:02.222978    4699 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/982110d1-99e0-46cb-b2b3-45fc92464cf7-tmp-volume\") pod \"dashboard-metrics-scraper-c5db448b4-z5ddp\" (UID: \"982110d1-99e0-46cb-b2b3-45fc92464cf7\") " pod="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4-z5ddp"
	Sep 16 10:51:03 functional-911502 kubelet[4699]: I0916 10:51:03.132461    4699 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	
	
	==> storage-provisioner [0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc] <==
	I0916 10:48:47.636953       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	F0916 10:48:47.638558       1 main.go:39] error getting server version: Get "https://10.96.0.1:443/version?timeout=32s": dial tcp 10.96.0.1:443: connect: connection refused
	
	
	==> storage-provisioner [494883dd75dacc85b892b0e05225fb2278a65feec328dc17976c6f49346f27fb] <==
	I0916 10:49:02.043876       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:49:02.059730       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:49:02.059783       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:49:19.457349       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:49:19.457523       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-911502_0476ccd8-74e0-4006-af4c-04ca9058bfa5!
	I0916 10:49:19.457940       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"28a802e4-0156-4c92-adef-4d6f2592a206", APIVersion:"v1", ResourceVersion:"554", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-911502_0476ccd8-74e0-4006-af4c-04ca9058bfa5 became leader
	I0916 10:49:19.558377       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-911502_0476ccd8-74e0-4006-af4c-04ca9058bfa5!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-911502 -n functional-911502
helpers_test.go:261: (dbg) Run:  kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (583.372µs)
helpers_test.go:263: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/parallel/DashboardCmd (7.19s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmdConnect (2.55s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmdConnect
=== PAUSE TestFunctional/parallel/ServiceCmdConnect

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ServiceCmdConnect
functional_test.go:1627: (dbg) Run:  kubectl --context functional-911502 create deployment hello-node-connect --image=registry.k8s.io/echoserver-arm:1.8
functional_test.go:1627: (dbg) Non-zero exit: kubectl --context functional-911502 create deployment hello-node-connect --image=registry.k8s.io/echoserver-arm:1.8: fork/exec /usr/local/bin/kubectl: exec format error (483.821µs)
functional_test.go:1633: failed to create hello-node deployment with this command "kubectl --context functional-911502 create deployment hello-node-connect --image=registry.k8s.io/echoserver-arm:1.8": fork/exec /usr/local/bin/kubectl: exec format error.
functional_test.go:1598: service test failed - dumping debug information
functional_test.go:1599: -----------------------service failure post-mortem--------------------------------
functional_test.go:1602: (dbg) Run:  kubectl --context functional-911502 describe po hello-node-connect
functional_test.go:1602: (dbg) Non-zero exit: kubectl --context functional-911502 describe po hello-node-connect: fork/exec /usr/local/bin/kubectl: exec format error (590.47µs)
functional_test.go:1604: "kubectl --context functional-911502 describe po hello-node-connect" failed: fork/exec /usr/local/bin/kubectl: exec format error
functional_test.go:1606: hello-node pod describe:
functional_test.go:1608: (dbg) Run:  kubectl --context functional-911502 logs -l app=hello-node-connect
functional_test.go:1608: (dbg) Non-zero exit: kubectl --context functional-911502 logs -l app=hello-node-connect: fork/exec /usr/local/bin/kubectl: exec format error (451.829µs)
functional_test.go:1610: "kubectl --context functional-911502 logs -l app=hello-node-connect" failed: fork/exec /usr/local/bin/kubectl: exec format error
functional_test.go:1612: hello-node logs:
functional_test.go:1614: (dbg) Run:  kubectl --context functional-911502 describe svc hello-node-connect
functional_test.go:1614: (dbg) Non-zero exit: kubectl --context functional-911502 describe svc hello-node-connect: fork/exec /usr/local/bin/kubectl: exec format error (554.114µs)
functional_test.go:1616: "kubectl --context functional-911502 describe svc hello-node-connect" failed: fork/exec /usr/local/bin/kubectl: exec format error
functional_test.go:1618: hello-node svc describe:
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/parallel/ServiceCmdConnect]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-911502
helpers_test.go:235: (dbg) docker inspect functional-911502:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1",
	        "Created": "2024-09-16T10:47:14.597354828Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2085675,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:47:14.7319597Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hostname",
	        "HostsPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hosts",
	        "LogPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1-json.log",
	        "Name": "/functional-911502",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-911502:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-911502",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/merged",
	                "UpperDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/diff",
	                "WorkDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "volume",
	                "Name": "functional-911502",
	                "Source": "/var/lib/docker/volumes/functional-911502/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            },
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-911502",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-911502",
	                "name.minikube.sigs.k8s.io": "functional-911502",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "44bb4c6f2ec0f0eef04adb8f886d0e0de7d31ae50612de741bed0ee945b2b75e",
	            "SandboxKey": "/var/run/docker/netns/44bb4c6f2ec0",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40592"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40593"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40596"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40594"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40595"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-911502": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "8c4428adf23c812318456ac17bea5953b33d7961994dfc84c0ff82a45764b662",
	                    "EndpointID": "8b3cc6f2c9f87b61b7e755d7ecd320ed6313887dfb3deab9f4e0858aa1c9fe80",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-911502",
	                        "9bf795605895"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-911502 -n functional-911502
helpers_test.go:244: <<< TestFunctional/parallel/ServiceCmdConnect FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/parallel/ServiceCmdConnect]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 logs -n 25: (1.774971401s)
helpers_test.go:252: TestFunctional/parallel/ServiceCmdConnect logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| Command |                                   Args                                   |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| cache   | functional-911502 cache reload                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	| ssh     | functional-911502 ssh                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | sudo crictl inspecti                                                     |                   |         |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| cache   | delete                                                                   | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:3.1                                                |                   |         |         |                     |                     |
	| cache   | delete                                                                   | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| kubectl | functional-911502 kubectl --                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | --context functional-911502                                              |                   |         |         |                     |                     |
	|         | get pods                                                                 |                   |         |         |                     |                     |
	| start   | -p functional-911502                                                     | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision |                   |         |         |                     |                     |
	|         | --wait=all                                                               |                   |         |         |                     |                     |
	| config  | functional-911502 config unset                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| cp      | functional-911502 cp                                                     | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | testdata/cp-test.txt                                                     |                   |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                 |                   |         |         |                     |                     |
	| config  | functional-911502 config get                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| config  | functional-911502 config set                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus 2                                                                   |                   |         |         |                     |                     |
	| config  | functional-911502 config get                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| config  | functional-911502 config unset                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| ssh     | functional-911502 ssh -n                                                 | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-911502 sudo cat                                               |                   |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                 |                   |         |         |                     |                     |
	| config  | functional-911502 config get                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| ssh     | functional-911502 ssh echo                                               | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | hello                                                                    |                   |         |         |                     |                     |
	| cp      | functional-911502 cp                                                     | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-911502:/home/docker/cp-test.txt                               |                   |         |         |                     |                     |
	|         | /tmp/TestFunctionalparallelCpCmd3382788966/001/cp-test.txt               |                   |         |         |                     |                     |
	| ssh     | functional-911502 ssh cat                                                | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | /etc/hostname                                                            |                   |         |         |                     |                     |
	| ssh     | functional-911502 ssh -n                                                 | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-911502 sudo cat                                               |                   |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                 |                   |         |         |                     |                     |
	| tunnel  | functional-911502 tunnel                                                 | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | --alsologtostderr                                                        |                   |         |         |                     |                     |
	| tunnel  | functional-911502 tunnel                                                 | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | --alsologtostderr                                                        |                   |         |         |                     |                     |
	| cp      | functional-911502 cp                                                     | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | testdata/cp-test.txt                                                     |                   |         |         |                     |                     |
	|         | /tmp/does/not/exist/cp-test.txt                                          |                   |         |         |                     |                     |
	| tunnel  | functional-911502 tunnel                                                 | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | --alsologtostderr                                                        |                   |         |         |                     |                     |
	| ssh     | functional-911502 ssh -n                                                 | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-911502 sudo cat                                               |                   |         |         |                     |                     |
	|         | /tmp/does/not/exist/cp-test.txt                                          |                   |         |         |                     |                     |
	| addons  | functional-911502 addons list                                            | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	| addons  | functional-911502 addons list                                            | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	|         | -o json                                                                  |                   |         |         |                     |                     |
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:48:18
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:48:18.056558 2090441 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:48:18.056731 2090441 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:48:18.056736 2090441 out.go:358] Setting ErrFile to fd 2...
	I0916 10:48:18.056744 2090441 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:48:18.057119 2090441 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:48:18.057904 2090441 out.go:352] Setting JSON to false
	I0916 10:48:18.059612 2090441 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":138640,"bootTime":1726345058,"procs":201,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:48:18.060116 2090441 start.go:139] virtualization:  
	I0916 10:48:18.063723 2090441 out.go:177] * [functional-911502] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:48:18.066427 2090441 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:48:18.066506 2090441 notify.go:220] Checking for updates...
	I0916 10:48:18.071616 2090441 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:48:18.074552 2090441 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:48:18.077059 2090441 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:48:18.079403 2090441 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:48:18.081886 2090441 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:48:18.085175 2090441 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:48:18.085267 2090441 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:48:18.118972 2090441 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:48:18.119082 2090441 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:48:18.187814 2090441 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:52 OomKillDisable:true NGoroutines:84 SystemTime:2024-09-16 10:48:18.177439051 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:48:18.187915 2090441 docker.go:318] overlay module found
	I0916 10:48:18.190819 2090441 out.go:177] * Using the docker driver based on existing profile
	I0916 10:48:18.193361 2090441 start.go:297] selected driver: docker
	I0916 10:48:18.193371 2090441 start.go:901] validating driver "docker" against &{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountU
ID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:18.193484 2090441 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:48:18.193591 2090441 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:48:18.252173 2090441 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:52 OomKillDisable:true NGoroutines:84 SystemTime:2024-09-16 10:48:18.241918971 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:48:18.252721 2090441 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:48:18.252750 2090441 cni.go:84] Creating CNI manager for ""
	I0916 10:48:18.252804 2090441 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:48:18.252849 2090441 start.go:340] cluster config:
	{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local Containe
rRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUI
D:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:18.255882 2090441 out.go:177] * Starting "functional-911502" primary control-plane node in "functional-911502" cluster
	I0916 10:48:18.258466 2090441 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:48:18.261084 2090441 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:48:18.263652 2090441 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:48:18.263697 2090441 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:48:18.263702 2090441 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:48:18.263711 2090441 cache.go:56] Caching tarball of preloaded images
	I0916 10:48:18.263789 2090441 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:48:18.263812 2090441 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:48:18.263919 2090441 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/config.json ...
	W0916 10:48:18.282640 2090441 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:48:18.282651 2090441 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:48:18.282779 2090441 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:48:18.282796 2090441 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:48:18.282799 2090441 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:48:18.282806 2090441 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:48:18.282811 2090441 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:48:18.403176 2090441 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:48:18.403224 2090441 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:48:18.403279 2090441 start.go:360] acquireMachinesLock for functional-911502: {Name:mk182321dd921c9bc14d73d2af41d001efc879fd Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:48:18.403399 2090441 start.go:364] duration metric: took 82.79µs to acquireMachinesLock for "functional-911502"
	I0916 10:48:18.403430 2090441 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:48:18.403435 2090441 fix.go:54] fixHost starting: 
	I0916 10:48:18.403813 2090441 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:18.421104 2090441 fix.go:112] recreateIfNeeded on functional-911502: state=Running err=<nil>
	W0916 10:48:18.421131 2090441 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:48:18.424219 2090441 out.go:177] * Updating the running docker "functional-911502" container ...
	I0916 10:48:18.426570 2090441 machine.go:93] provisionDockerMachine start ...
	I0916 10:48:18.426707 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:18.443892 2090441 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:18.444150 2090441 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:48:18.444157 2090441 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:48:18.582615 2090441 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-911502
	
	I0916 10:48:18.582628 2090441 ubuntu.go:169] provisioning hostname "functional-911502"
	I0916 10:48:18.582729 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:18.601133 2090441 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:18.601387 2090441 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:48:18.601402 2090441 main.go:141] libmachine: About to run SSH command:
	sudo hostname functional-911502 && echo "functional-911502" | sudo tee /etc/hostname
	I0916 10:48:18.750562 2090441 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-911502
	
	I0916 10:48:18.750635 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:18.768271 2090441 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:18.768521 2090441 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:48:18.768536 2090441 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sfunctional-911502' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 functional-911502/g' /etc/hosts;
				else 
					echo '127.0.1.1 functional-911502' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:48:18.907246 2090441 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:48:18.907263 2090441 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:48:18.907293 2090441 ubuntu.go:177] setting up certificates
	I0916 10:48:18.907302 2090441 provision.go:84] configureAuth start
	I0916 10:48:18.907364 2090441 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-911502
	I0916 10:48:18.927347 2090441 provision.go:143] copyHostCerts
	I0916 10:48:18.927406 2090441 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:48:18.927422 2090441 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:48:18.927486 2090441 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:48:18.927589 2090441 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:48:18.927593 2090441 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:48:18.927630 2090441 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:48:18.927703 2090441 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:48:18.927706 2090441 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:48:18.927733 2090441 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:48:18.927784 2090441 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.functional-911502 san=[127.0.0.1 192.168.49.2 functional-911502 localhost minikube]
	I0916 10:48:19.991257 2090441 provision.go:177] copyRemoteCerts
	I0916 10:48:19.991315 2090441 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:48:19.991358 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.029993 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.128591 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:48:20.156277 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1220 bytes)
	I0916 10:48:20.183185 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:48:20.209618 2090441 provision.go:87] duration metric: took 1.302302469s to configureAuth
	I0916 10:48:20.209635 2090441 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:48:20.209838 2090441 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:48:20.209844 2090441 machine.go:96] duration metric: took 1.783266636s to provisionDockerMachine
	I0916 10:48:20.209851 2090441 start.go:293] postStartSetup for "functional-911502" (driver="docker")
	I0916 10:48:20.209861 2090441 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:48:20.209924 2090441 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:48:20.209968 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.227087 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.323907 2090441 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:48:20.327142 2090441 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:48:20.327167 2090441 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:48:20.327179 2090441 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:48:20.327185 2090441 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:48:20.327194 2090441 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:48:20.327249 2090441 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:48:20.327327 2090441 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:48:20.327402 2090441 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/test/nested/copy/2063326/hosts -> hosts in /etc/test/nested/copy/2063326
	I0916 10:48:20.327447 2090441 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs /etc/test/nested/copy/2063326
	I0916 10:48:20.336043 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:48:20.360460 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/test/nested/copy/2063326/hosts --> /etc/test/nested/copy/2063326/hosts (40 bytes)
	I0916 10:48:20.385297 2090441 start.go:296] duration metric: took 175.431776ms for postStartSetup
	I0916 10:48:20.385378 2090441 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:48:20.385419 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.402295 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.495689 2090441 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:48:20.500214 2090441 fix.go:56] duration metric: took 2.096771088s for fixHost
	I0916 10:48:20.500228 2090441 start.go:83] releasing machines lock for "functional-911502", held for 2.096820746s
	I0916 10:48:20.500311 2090441 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-911502
	I0916 10:48:20.517203 2090441 ssh_runner.go:195] Run: cat /version.json
	I0916 10:48:20.517249 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.517492 2090441 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:48:20.517559 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.534860 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.538812 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.630195 2090441 ssh_runner.go:195] Run: systemctl --version
	I0916 10:48:20.761370 2090441 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:48:20.765788 2090441 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:48:20.785230 2090441 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:48:20.785301 2090441 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:48:20.794254 2090441 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:48:20.794269 2090441 start.go:495] detecting cgroup driver to use...
	I0916 10:48:20.794301 2090441 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:48:20.794353 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:48:20.807440 2090441 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:48:20.819052 2090441 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:48:20.819108 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:48:20.832763 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:48:20.845443 2090441 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:48:20.973966 2090441 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:48:21.096206 2090441 docker.go:233] disabling docker service ...
	I0916 10:48:21.096283 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:48:21.120231 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:48:21.134274 2090441 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:48:21.245027 2090441 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:48:21.353592 2090441 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:48:21.366138 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:48:21.385216 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:48:21.397039 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:48:21.408473 2090441 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:48:21.408530 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:48:21.419890 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:48:21.430748 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:48:21.441177 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:48:21.452981 2090441 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:48:21.463398 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:48:21.474455 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:48:21.485837 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:48:21.495989 2090441 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:48:21.504821 2090441 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:48:21.514007 2090441 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:21.630033 2090441 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:48:21.929885 2090441 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:48:21.929958 2090441 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:48:21.934261 2090441 start.go:563] Will wait 60s for crictl version
	I0916 10:48:21.934371 2090441 ssh_runner.go:195] Run: which crictl
	I0916 10:48:21.937870 2090441 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:48:21.977142 2090441 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:48:21.977214 2090441 ssh_runner.go:195] Run: containerd --version
	I0916 10:48:21.999841 2090441 ssh_runner.go:195] Run: containerd --version
	I0916 10:48:22.030027 2090441 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:48:22.032799 2090441 cli_runner.go:164] Run: docker network inspect functional-911502 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:48:22.049379 2090441 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:48:22.056006 2090441 out.go:177]   - apiserver.enable-admission-plugins=NamespaceAutoProvision
	I0916 10:48:22.058597 2090441 kubeadm.go:883] updating cluster {Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA API
ServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: Mou
ntMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:48:22.058756 2090441 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:48:22.058847 2090441 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:22.096492 2090441 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:48:22.096505 2090441 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:48:22.096567 2090441 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:22.140078 2090441 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:48:22.140090 2090441 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:48:22.140096 2090441 kubeadm.go:934] updating node { 192.168.49.2 8441 v1.31.1 containerd true true} ...
	I0916 10:48:22.140203 2090441 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=functional-911502 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:48:22.140274 2090441 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:48:22.179208 2090441 extraconfig.go:124] Overwriting default enable-admission-plugins=NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota with user provided enable-admission-plugins=NamespaceAutoProvision for component apiserver
	I0916 10:48:22.179227 2090441 cni.go:84] Creating CNI manager for ""
	I0916 10:48:22.179236 2090441 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:48:22.179244 2090441 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:48:22.179266 2090441 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8441 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:functional-911502 NodeName:functional-911502 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceAutoProvision] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfi
gOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:48:22.179387 2090441 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8441
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "functional-911502"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceAutoProvision"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8441
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:48:22.179477 2090441 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:48:22.188765 2090441 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:48:22.188832 2090441 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:48:22.197782 2090441 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (321 bytes)
	I0916 10:48:22.216639 2090441 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:48:22.234768 2090441 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2021 bytes)
	I0916 10:48:22.253522 2090441 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:48:22.257457 2090441 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:22.374849 2090441 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:22.388543 2090441 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502 for IP: 192.168.49.2
	I0916 10:48:22.388555 2090441 certs.go:194] generating shared ca certs ...
	I0916 10:48:22.388570 2090441 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:22.388723 2090441 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:48:22.388763 2090441 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:48:22.388769 2090441 certs.go:256] generating profile certs ...
	I0916 10:48:22.388849 2090441 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.key
	I0916 10:48:22.388891 2090441 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.key.03a9d60c
	I0916 10:48:22.388929 2090441 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.key
	I0916 10:48:22.389051 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:48:22.389077 2090441 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:48:22.389085 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:48:22.389109 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:48:22.389129 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:48:22.389149 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:48:22.389190 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:48:22.389803 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:48:22.417755 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:48:22.444790 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:48:22.469829 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:48:22.494601 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:48:22.519153 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:48:22.545011 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:48:22.569691 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:48:22.595014 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:48:22.619318 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:48:22.644259 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:48:22.668772 2090441 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:48:22.686564 2090441 ssh_runner.go:195] Run: openssl version
	I0916 10:48:22.692173 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:48:22.702850 2090441 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:22.706341 2090441 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:22.706407 2090441 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:22.713233 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:48:22.722032 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:48:22.731748 2090441 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:48:22.735358 2090441 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:48:22.735429 2090441 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:48:22.742406 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:48:22.751458 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:48:22.760959 2090441 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:48:22.764337 2090441 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:48:22.764391 2090441 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:48:22.771679 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:48:22.780664 2090441 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:48:22.784169 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:48:22.790852 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:48:22.797686 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:48:22.804722 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:48:22.811546 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:48:22.818077 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:48:22.824812 2090441 kubeadm.go:392] StartCluster: {Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APISer
verNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountM
Size:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:22.824893 2090441 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:48:22.824962 2090441 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:48:22.863779 2090441 cri.go:89] found id: "11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950"
	I0916 10:48:22.863790 2090441 cri.go:89] found id: "8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27"
	I0916 10:48:22.863793 2090441 cri.go:89] found id: "ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b"
	I0916 10:48:22.863796 2090441 cri.go:89] found id: "57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6"
	I0916 10:48:22.863798 2090441 cri.go:89] found id: "a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	I0916 10:48:22.863801 2090441 cri.go:89] found id: "492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31"
	I0916 10:48:22.863804 2090441 cri.go:89] found id: "31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44"
	I0916 10:48:22.863812 2090441 cri.go:89] found id: "928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a"
	I0916 10:48:22.863814 2090441 cri.go:89] found id: ""
	I0916 10:48:22.863867 2090441 ssh_runner.go:195] Run: sudo runc --root /run/containerd/runc/k8s.io list -f json
	I0916 10:48:22.896045 2090441 cri.go:116] JSON = [{"ociVersion":"1.0.2-dev","id":"11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","pid":2200,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950/rootfs","created":"2024-09-16T10:47:56.240222491Z","annotations":{"io.kubernetes.cri.container-name":"coredns","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/coredns/coredns:v1.11.3","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"31265291ac7da492c3c
fad84540ba2b684cdf0abad82be5c56d392df7613dc44","pid":1422,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44/rootfs","created":"2024-09-16T10:47:32.746424926Z","annotations":{"io.kubernetes.cri.container-name":"kube-scheduler","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-scheduler:v1.31.1","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","pid":1960,"status":"running","bundle":"/run/containe
rd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c/rootfs","created":"2024-09-16T10:47:45.184280835Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_storage-provisioner_ecac562d-8318-4226-b5f1-61f2c76bb51b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6
b99ce6c31","pid":1486,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31/rootfs","created":"2024-09-16T10:47:32.919386537Z","annotations":{"io.kubernetes.cri.container-name":"etcd","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/etcd:3.5.15-0","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","pid":1315,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e9713de05
1f556893ed4577eb8b5d9d38835da8d64517b7a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a/rootfs","created":"2024-09-16T10:47:32.575723929Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"256","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-apiserver-functional-911502_846e81f7bcac6804cf5ef499ea5ac265","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","pid":1316,"status":"running","bun
dle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f/rootfs","created":"2024-09-16T10:47:32.562785558Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"204","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-controller-manager-functional-911502_26c4a2e985a1c721e0411e5d9497a35b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id"
:"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","pid":1338,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7/rootfs","created":"2024-09-16T10:47:32.61203341Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-scheduler-functional-911502_69dae9ff35c780f43a15f539d6f19e46","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid"
:"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","pid":1792,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6/rootfs","created":"2024-09-16T10:47:44.701696235Z","annotations":{"io.kubernetes.cri.container-name":"kube-proxy","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-proxy:v1.31.1","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"8aede76947864ca07593bc24b939a29faf7
bb7dd85244f30f18f232f3ec1ea27","pid":2092,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27/rootfs","created":"2024-09-16T10:47:45.396938875Z","annotations":{"io.kubernetes.cri.container-name":"storage-provisioner","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"gcr.io/k8s-minikube/storage-provisioner:v5","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","pid":1399,"status":"running","bundle":"/run/containerd/io.containerd
.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a/rootfs","created":"2024-09-16T10:47:32.722647206Z","annotations":{"io.kubernetes.cri.container-name":"kube-controller-manager","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-controller-manager:v1.31.1","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","pid":2166,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3e
e6fc0070983ddfd","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd/rootfs","created":"2024-09-16T10:47:56.156755788Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_coredns-7c65d6cfc9-6kw9d_072167c7-fa1a-463e-a957-91ea24020387","io.kubernetes.cri.sandbox-memory":"178257920","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","pid":1479,"status":"running","bundle":"/run/containerd/io
.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19/rootfs","created":"2024-09-16T10:47:32.854949973Z","annotations":{"io.kubernetes.cri.container-name":"kube-apiserver","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-apiserver:v1.31.1","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","pid":1768,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e"
,"rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e/rootfs","created":"2024-09-16T10:47:44.62156803Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"10000","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kindnet-7r2rg_ab52a601-e0fe-4f60-a202-477487da9bb2","io.kubernetes.cri.sandbox-memory":"52428800","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","pid":1733,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/c
900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1/rootfs","created":"2024-09-16T10:47:44.525242585Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-proxy-l59dx_72a26843-9f97-4121-91f0-3cb389048315","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","pid":1835,"status":"running","bundl
e":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b/rootfs","created":"2024-09-16T10:47:44.827883907Z","annotations":{"io.kubernetes.cri.container-name":"kindnet-cni","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"docker.io/kindest/kindnetd:v20240813-c6f155d6","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","pid":1295,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedb
e08802f86e","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e/rootfs","created":"2024-09-16T10:47:32.534113117Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_etcd-functional-911502_c7ab8017ca620f2ba7e026f4cdb427a2","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"}]
	I0916 10:48:22.896348 2090441 cri.go:126] list returned 16 containers
	I0916 10:48:22.896356 2090441 cri.go:129] container: {ID:11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 Status:running}
	I0916 10:48:22.896377 2090441 cri.go:135] skipping {11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 running}: state = "running", want "paused"
	I0916 10:48:22.896384 2090441 cri.go:129] container: {ID:31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 Status:running}
	I0916 10:48:22.896389 2090441 cri.go:135] skipping {31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 running}: state = "running", want "paused"
	I0916 10:48:22.896394 2090441 cri.go:129] container: {ID:334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c Status:running}
	I0916 10:48:22.896399 2090441 cri.go:131] skipping 334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c - not in ps
	I0916 10:48:22.896404 2090441 cri.go:129] container: {ID:492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 Status:running}
	I0916 10:48:22.896409 2090441 cri.go:135] skipping {492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 running}: state = "running", want "paused"
	I0916 10:48:22.896414 2090441 cri.go:129] container: {ID:51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a Status:running}
	I0916 10:48:22.896418 2090441 cri.go:131] skipping 51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a - not in ps
	I0916 10:48:22.896421 2090441 cri.go:129] container: {ID:54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f Status:running}
	I0916 10:48:22.896424 2090441 cri.go:131] skipping 54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f - not in ps
	I0916 10:48:22.896427 2090441 cri.go:129] container: {ID:578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7 Status:running}
	I0916 10:48:22.896430 2090441 cri.go:131] skipping 578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7 - not in ps
	I0916 10:48:22.896433 2090441 cri.go:129] container: {ID:57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 Status:running}
	I0916 10:48:22.896438 2090441 cri.go:135] skipping {57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 running}: state = "running", want "paused"
	I0916 10:48:22.896442 2090441 cri.go:129] container: {ID:8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 Status:running}
	I0916 10:48:22.896447 2090441 cri.go:135] skipping {8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 running}: state = "running", want "paused"
	I0916 10:48:22.896451 2090441 cri.go:129] container: {ID:928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a Status:running}
	I0916 10:48:22.896455 2090441 cri.go:135] skipping {928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a running}: state = "running", want "paused"
	I0916 10:48:22.896459 2090441 cri.go:129] container: {ID:95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd Status:running}
	I0916 10:48:22.896464 2090441 cri.go:131] skipping 95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd - not in ps
	I0916 10:48:22.896466 2090441 cri.go:129] container: {ID:a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 Status:running}
	I0916 10:48:22.896470 2090441 cri.go:135] skipping {a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 running}: state = "running", want "paused"
	I0916 10:48:22.896474 2090441 cri.go:129] container: {ID:b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e Status:running}
	I0916 10:48:22.896478 2090441 cri.go:131] skipping b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e - not in ps
	I0916 10:48:22.896480 2090441 cri.go:129] container: {ID:c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1 Status:running}
	I0916 10:48:22.896484 2090441 cri.go:131] skipping c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1 - not in ps
	I0916 10:48:22.896491 2090441 cri.go:129] container: {ID:ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b Status:running}
	I0916 10:48:22.896497 2090441 cri.go:135] skipping {ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b running}: state = "running", want "paused"
	I0916 10:48:22.896502 2090441 cri.go:129] container: {ID:e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e Status:running}
	I0916 10:48:22.896506 2090441 cri.go:131] skipping e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e - not in ps
	I0916 10:48:22.896558 2090441 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:48:22.906082 2090441 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 10:48:22.906092 2090441 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 10:48:22.906148 2090441 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 10:48:22.914917 2090441 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:48:22.915488 2090441 kubeconfig.go:125] found "functional-911502" server: "https://192.168.49.2:8441"
	I0916 10:48:22.916782 2090441 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 10:48:22.925899 2090441 kubeadm.go:640] detected kubeadm config drift (will reconfigure cluster from new /var/tmp/minikube/kubeadm.yaml):
	-- stdout --
	--- /var/tmp/minikube/kubeadm.yaml	2024-09-16 10:47:21.242945037 +0000
	+++ /var/tmp/minikube/kubeadm.yaml.new	2024-09-16 10:48:22.250558320 +0000
	@@ -22,7 +22,7 @@
	 apiServer:
	   certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	   extraArgs:
	-    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	+    enable-admission-plugins: "NamespaceAutoProvision"
	 controllerManager:
	   extraArgs:
	     allocate-node-cidrs: "true"
	
	-- /stdout --
	I0916 10:48:22.925911 2090441 kubeadm.go:1160] stopping kube-system containers ...
	I0916 10:48:22.925922 2090441 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name: Namespaces:[kube-system]}
	I0916 10:48:22.925986 2090441 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:48:22.969464 2090441 cri.go:89] found id: "11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950"
	I0916 10:48:22.969476 2090441 cri.go:89] found id: "8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27"
	I0916 10:48:22.969480 2090441 cri.go:89] found id: "ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b"
	I0916 10:48:22.969488 2090441 cri.go:89] found id: "57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6"
	I0916 10:48:22.969491 2090441 cri.go:89] found id: "a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	I0916 10:48:22.969494 2090441 cri.go:89] found id: "492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31"
	I0916 10:48:22.969497 2090441 cri.go:89] found id: "31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44"
	I0916 10:48:22.969499 2090441 cri.go:89] found id: "928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a"
	I0916 10:48:22.969502 2090441 cri.go:89] found id: ""
	I0916 10:48:22.969506 2090441 cri.go:252] Stopping containers: [11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b 57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a]
	I0916 10:48:22.969568 2090441 ssh_runner.go:195] Run: which crictl
	I0916 10:48:22.973333 2090441 ssh_runner.go:195] Run: sudo /usr/bin/crictl stop --timeout=10 11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b 57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a
	I0916 10:48:38.633471 2090441 ssh_runner.go:235] Completed: sudo /usr/bin/crictl stop --timeout=10 11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b 57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a: (15.660102208s)
	I0916 10:48:38.633532 2090441 ssh_runner.go:195] Run: sudo systemctl stop kubelet
	I0916 10:48:38.739961 2090441 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:48:38.750010 2090441 kubeadm.go:157] found existing configuration files:
	-rw------- 1 root root 5651 Sep 16 10:47 /etc/kubernetes/admin.conf
	-rw------- 1 root root 5652 Sep 16 10:47 /etc/kubernetes/controller-manager.conf
	-rw------- 1 root root 2007 Sep 16 10:47 /etc/kubernetes/kubelet.conf
	-rw------- 1 root root 5600 Sep 16 10:47 /etc/kubernetes/scheduler.conf
	
	I0916 10:48:38.750102 2090441 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/admin.conf
	I0916 10:48:38.760985 2090441 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/kubelet.conf
	I0916 10:48:38.771244 2090441 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/controller-manager.conf
	I0916 10:48:38.781324 2090441 kubeadm.go:163] "https://control-plane.minikube.internal:8441" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/controller-manager.conf: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:48:38.781386 2090441 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:48:38.791843 2090441 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/scheduler.conf
	I0916 10:48:38.802301 2090441 kubeadm.go:163] "https://control-plane.minikube.internal:8441" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/scheduler.conf: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:48:38.802359 2090441 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:48:38.811917 2090441 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:48:38.825359 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase certs all --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:38.890532 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase kubeconfig all --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:40.317726 2090441 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase kubeconfig all --config /var/tmp/minikube/kubeadm.yaml": (1.427159279s)
	I0916 10:48:40.317743 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase kubelet-start --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:40.550624 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase control-plane all --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:40.692801 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase etcd local --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:40.850212 2090441 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:48:40.850281 2090441 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:48:40.873399 2090441 api_server.go:72] duration metric: took 23.195463ms to wait for apiserver process to appear ...
	I0916 10:48:40.873414 2090441 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:48:40.873442 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:42.076555 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	W0916 10:48:42.076579 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	I0916 10:48:42.076592 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:42.086814 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	W0916 10:48:42.086832 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	I0916 10:48:42.374183 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:42.383929 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:42.383952 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:42.874448 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:42.911477 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:42.911495 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:43.373607 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:43.373978 2090441 api_server.go:269] stopped: https://192.168.49.2:8441/healthz: Get "https://192.168.49.2:8441/healthz": dial tcp 192.168.49.2:8441: connect: connection refused
	I0916 10:48:43.873567 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:43.873950 2090441 api_server.go:269] stopped: https://192.168.49.2:8441/healthz: Get "https://192.168.49.2:8441/healthz": dial tcp 192.168.49.2:8441: connect: connection refused
	I0916 10:48:44.373541 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:46.967237 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	W0916 10:48:46.967252 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	I0916 10:48:46.967269 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:47.234554 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:47.234573 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:47.373856 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:47.381648 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:47.381668 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:47.874262 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:47.881898 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:47.881918 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:48.373987 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:48.381643 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 200:
	ok
	I0916 10:48:48.395482 2090441 api_server.go:141] control plane version: v1.31.1
	I0916 10:48:48.395506 2090441 api_server.go:131] duration metric: took 7.522086105s to wait for apiserver health ...
	I0916 10:48:48.395515 2090441 cni.go:84] Creating CNI manager for ""
	I0916 10:48:48.395522 2090441 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:48:48.398238 2090441 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:48:48.400734 2090441 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:48:48.404728 2090441 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:48:48.404739 2090441 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:48:48.426027 2090441 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 10:48:48.844610 2090441 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:48:48.855743 2090441 system_pods.go:59] 8 kube-system pods found
	I0916 10:48:48.855765 2090441 system_pods.go:61] "coredns-7c65d6cfc9-6kw9d" [072167c7-fa1a-463e-a957-91ea24020387] Running
	I0916 10:48:48.855775 2090441 system_pods.go:61] "etcd-functional-911502" [9cccef26-ac83-485f-a6ae-2017f0ff645b] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 10:48:48.855785 2090441 system_pods.go:61] "kindnet-7r2rg" [ab52a601-e0fe-4f60-a202-477487da9bb2] Running
	I0916 10:48:48.855802 2090441 system_pods.go:61] "kube-apiserver-functional-911502" [49c666a4-4c79-4cfd-9ed9-f8307aea2147] Running / Ready:ContainersNotReady (containers with unready status: [kube-apiserver]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-apiserver])
	I0916 10:48:48.855809 2090441 system_pods.go:61] "kube-controller-manager-functional-911502" [60f8d5ef-11df-400e-bce8-00ed7502b8c7] Running / Ready:ContainersNotReady (containers with unready status: [kube-controller-manager]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-controller-manager])
	I0916 10:48:48.855818 2090441 system_pods.go:61] "kube-proxy-l59dx" [72a26843-9f97-4121-91f0-3cb389048315] Running
	I0916 10:48:48.855824 2090441 system_pods.go:61] "kube-scheduler-functional-911502" [7da8ecbb-189d-4ed2-bcbe-69ef483b67e8] Running / Ready:ContainersNotReady (containers with unready status: [kube-scheduler]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-scheduler])
	I0916 10:48:48.855835 2090441 system_pods.go:61] "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
	I0916 10:48:48.855842 2090441 system_pods.go:74] duration metric: took 11.220081ms to wait for pod list to return data ...
	I0916 10:48:48.855851 2090441 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:48:48.865905 2090441 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:48:48.865924 2090441 node_conditions.go:123] node cpu capacity is 2
	I0916 10:48:48.865935 2090441 node_conditions.go:105] duration metric: took 10.079887ms to run NodePressure ...
	I0916 10:48:48.865953 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase addon all --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:49.117268 2090441 kubeadm.go:724] waiting for restarted kubelet to initialise ...
	I0916 10:48:49.129456 2090441 kubeadm.go:739] kubelet initialised
	I0916 10:48:49.129467 2090441 kubeadm.go:740] duration metric: took 12.186746ms waiting for restarted kubelet to initialise ...
	I0916 10:48:49.129475 2090441 pod_ready.go:36] extra waiting up to 4m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:49.137397 2090441 pod_ready.go:79] waiting up to 4m0s for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:49.144966 2090441 pod_ready.go:93] pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:49.144978 2090441 pod_ready.go:82] duration metric: took 7.566607ms for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:49.144988 2090441 pod_ready.go:79] waiting up to 4m0s for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:51.150942 2090441 pod_ready.go:103] pod "etcd-functional-911502" in "kube-system" namespace has status "Ready":"False"
	I0916 10:48:52.151654 2090441 pod_ready.go:93] pod "etcd-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:52.151667 2090441 pod_ready.go:82] duration metric: took 3.006671097s for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:52.151678 2090441 pod_ready.go:79] waiting up to 4m0s for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:54.158468 2090441 pod_ready.go:103] pod "kube-apiserver-functional-911502" in "kube-system" namespace has status "Ready":"False"
	I0916 10:48:56.158238 2090441 pod_ready.go:93] pod "kube-apiserver-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.158254 2090441 pod_ready.go:82] duration metric: took 4.006566025s for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.158263 2090441 pod_ready.go:79] waiting up to 4m0s for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.165526 2090441 pod_ready.go:93] pod "kube-controller-manager-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.165538 2090441 pod_ready.go:82] duration metric: took 7.267917ms for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.165547 2090441 pod_ready.go:79] waiting up to 4m0s for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.171832 2090441 pod_ready.go:93] pod "kube-proxy-l59dx" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.171843 2090441 pod_ready.go:82] duration metric: took 6.290339ms for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.171853 2090441 pod_ready.go:79] waiting up to 4m0s for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.177575 2090441 pod_ready.go:93] pod "kube-scheduler-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.177587 2090441 pod_ready.go:82] duration metric: took 5.727669ms for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.177598 2090441 pod_ready.go:39] duration metric: took 7.048114784s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:56.177613 2090441 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:48:56.185221 2090441 ops.go:34] apiserver oom_adj: -16
	I0916 10:48:56.185232 2090441 kubeadm.go:597] duration metric: took 33.279135817s to restartPrimaryControlPlane
	I0916 10:48:56.185240 2090441 kubeadm.go:394] duration metric: took 33.360438088s to StartCluster
	I0916 10:48:56.185255 2090441 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:56.185318 2090441 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:48:56.185924 2090441 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:56.186127 2090441 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:48:56.186461 2090441 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:48:56.186501 2090441 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:48:56.186566 2090441 addons.go:69] Setting storage-provisioner=true in profile "functional-911502"
	I0916 10:48:56.186578 2090441 addons.go:234] Setting addon storage-provisioner=true in "functional-911502"
	W0916 10:48:56.186583 2090441 addons.go:243] addon storage-provisioner should already be in state true
	I0916 10:48:56.186603 2090441 host.go:66] Checking if "functional-911502" exists ...
	I0916 10:48:56.186753 2090441 addons.go:69] Setting default-storageclass=true in profile "functional-911502"
	I0916 10:48:56.186769 2090441 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "functional-911502"
	I0916 10:48:56.187108 2090441 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:56.187112 2090441 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:56.191212 2090441 out.go:177] * Verifying Kubernetes components...
	I0916 10:48:56.194523 2090441 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:56.225453 2090441 addons.go:234] Setting addon default-storageclass=true in "functional-911502"
	W0916 10:48:56.225463 2090441 addons.go:243] addon default-storageclass should already be in state true
	I0916 10:48:56.225486 2090441 host.go:66] Checking if "functional-911502" exists ...
	I0916 10:48:56.225903 2090441 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:56.230244 2090441 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:48:56.235857 2090441 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:56.235869 2090441 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:48:56.235948 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:56.263017 2090441 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:56.263031 2090441 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:48:56.263097 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:56.282120 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:56.301735 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:56.353010 2090441 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:56.368505 2090441 node_ready.go:35] waiting up to 6m0s for node "functional-911502" to be "Ready" ...
	I0916 10:48:56.372081 2090441 node_ready.go:49] node "functional-911502" has status "Ready":"True"
	I0916 10:48:56.372091 2090441 node_ready.go:38] duration metric: took 3.567903ms for node "functional-911502" to be "Ready" ...
	I0916 10:48:56.372099 2090441 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:56.379540 2090441 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.407301 2090441 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:56.437317 2090441 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:56.555436 2090441 pod_ready.go:93] pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.555447 2090441 pod_ready.go:82] duration metric: took 175.892756ms for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.555456 2090441 pod_ready.go:79] waiting up to 6m0s for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.956056 2090441 pod_ready.go:93] pod "etcd-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.956068 2090441 pod_ready.go:82] duration metric: took 400.604402ms for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.956081 2090441 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:57.232685 2090441 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 10:48:57.235189 2090441 addons.go:510] duration metric: took 1.048680086s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0916 10:48:57.355420 2090441 pod_ready.go:93] pod "kube-apiserver-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:57.355430 2090441 pod_ready.go:82] duration metric: took 399.343184ms for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:57.355440 2090441 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:57.755583 2090441 pod_ready.go:93] pod "kube-controller-manager-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:57.755595 2090441 pod_ready.go:82] duration metric: took 400.148824ms for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:57.755605 2090441 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:58.155248 2090441 pod_ready.go:93] pod "kube-proxy-l59dx" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:58.155259 2090441 pod_ready.go:82] duration metric: took 399.64819ms for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:58.155269 2090441 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:58.555115 2090441 pod_ready.go:93] pod "kube-scheduler-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:58.555126 2090441 pod_ready.go:82] duration metric: took 399.851365ms for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:58.555137 2090441 pod_ready.go:39] duration metric: took 2.183026428s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:58.555151 2090441 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:48:58.555220 2090441 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:48:58.566810 2090441 api_server.go:72] duration metric: took 2.380658837s to wait for apiserver process to appear ...
	I0916 10:48:58.566825 2090441 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:48:58.566852 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:58.574536 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 200:
	ok
	I0916 10:48:58.575555 2090441 api_server.go:141] control plane version: v1.31.1
	I0916 10:48:58.575570 2090441 api_server.go:131] duration metric: took 8.738422ms to wait for apiserver health ...
	I0916 10:48:58.575586 2090441 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:48:58.759444 2090441 system_pods.go:59] 8 kube-system pods found
	I0916 10:48:58.759464 2090441 system_pods.go:61] "coredns-7c65d6cfc9-6kw9d" [072167c7-fa1a-463e-a957-91ea24020387] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 10:48:58.759469 2090441 system_pods.go:61] "etcd-functional-911502" [9cccef26-ac83-485f-a6ae-2017f0ff645b] Running
	I0916 10:48:58.759474 2090441 system_pods.go:61] "kindnet-7r2rg" [ab52a601-e0fe-4f60-a202-477487da9bb2] Running
	I0916 10:48:58.759478 2090441 system_pods.go:61] "kube-apiserver-functional-911502" [49c666a4-4c79-4cfd-9ed9-f8307aea2147] Running
	I0916 10:48:58.759482 2090441 system_pods.go:61] "kube-controller-manager-functional-911502" [60f8d5ef-11df-400e-bce8-00ed7502b8c7] Running
	I0916 10:48:58.759485 2090441 system_pods.go:61] "kube-proxy-l59dx" [72a26843-9f97-4121-91f0-3cb389048315] Running
	I0916 10:48:58.759488 2090441 system_pods.go:61] "kube-scheduler-functional-911502" [7da8ecbb-189d-4ed2-bcbe-69ef483b67e8] Running
	I0916 10:48:58.759493 2090441 system_pods.go:61] "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
	I0916 10:48:58.759499 2090441 system_pods.go:74] duration metric: took 183.907696ms to wait for pod list to return data ...
	I0916 10:48:58.759506 2090441 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:48:58.956246 2090441 default_sa.go:45] found service account: "default"
	I0916 10:48:58.956260 2090441 default_sa.go:55] duration metric: took 196.748952ms for default service account to be created ...
	I0916 10:48:58.956270 2090441 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:48:59.159500 2090441 system_pods.go:86] 8 kube-system pods found
	I0916 10:48:59.159531 2090441 system_pods.go:89] "coredns-7c65d6cfc9-6kw9d" [072167c7-fa1a-463e-a957-91ea24020387] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 10:48:59.159538 2090441 system_pods.go:89] "etcd-functional-911502" [9cccef26-ac83-485f-a6ae-2017f0ff645b] Running
	I0916 10:48:59.159543 2090441 system_pods.go:89] "kindnet-7r2rg" [ab52a601-e0fe-4f60-a202-477487da9bb2] Running
	I0916 10:48:59.159547 2090441 system_pods.go:89] "kube-apiserver-functional-911502" [49c666a4-4c79-4cfd-9ed9-f8307aea2147] Running
	I0916 10:48:59.159551 2090441 system_pods.go:89] "kube-controller-manager-functional-911502" [60f8d5ef-11df-400e-bce8-00ed7502b8c7] Running
	I0916 10:48:59.159559 2090441 system_pods.go:89] "kube-proxy-l59dx" [72a26843-9f97-4121-91f0-3cb389048315] Running
	I0916 10:48:59.159566 2090441 system_pods.go:89] "kube-scheduler-functional-911502" [7da8ecbb-189d-4ed2-bcbe-69ef483b67e8] Running
	I0916 10:48:59.159572 2090441 system_pods.go:89] "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
	I0916 10:48:59.159582 2090441 system_pods.go:126] duration metric: took 203.306375ms to wait for k8s-apps to be running ...
	I0916 10:48:59.159588 2090441 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:48:59.159665 2090441 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:48:59.172329 2090441 system_svc.go:56] duration metric: took 12.729233ms WaitForService to wait for kubelet
	I0916 10:48:59.172348 2090441 kubeadm.go:582] duration metric: took 2.986201415s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:48:59.172365 2090441 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:48:59.356377 2090441 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:48:59.356393 2090441 node_conditions.go:123] node cpu capacity is 2
	I0916 10:48:59.356402 2090441 node_conditions.go:105] duration metric: took 184.03252ms to run NodePressure ...
	I0916 10:48:59.356414 2090441 start.go:241] waiting for startup goroutines ...
	I0916 10:48:59.356420 2090441 start.go:246] waiting for cluster config update ...
	I0916 10:48:59.356430 2090441 start.go:255] writing updated cluster config ...
	I0916 10:48:59.356743 2090441 ssh_runner.go:195] Run: rm -f paused
	I0916 10:48:59.365595 2090441 out.go:177] * Done! kubectl is now configured to use "functional-911502" cluster and "default" namespace by default
	E0916 10:48:59.367860 2090441 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	494883dd75dac       ba04bb24b9575       About a minute ago   Running             storage-provisioner       3                   334ec243859df       storage-provisioner
	0b22f9fb6da5e       ba04bb24b9575       2 minutes ago        Exited              storage-provisioner       2                   334ec243859df       storage-provisioner
	da9cb172fba10       d3f53a98c0a9d       2 minutes ago        Running             kube-apiserver            1                   6b31db950b5d2       kube-apiserver-functional-911502
	95188fff801b2       d3f53a98c0a9d       2 minutes ago        Exited              kube-apiserver            0                   6b31db950b5d2       kube-apiserver-functional-911502
	16a7dfc9e0119       7f8aa378bb47d       2 minutes ago        Running             kube-scheduler            1                   578f22ca4016c       kube-scheduler-functional-911502
	d954d9e91e01c       279f381cb3736       2 minutes ago        Running             kube-controller-manager   1                   54de1abbce22f       kube-controller-manager-functional-911502
	1a427a607f521       27e3830e14027       2 minutes ago        Running             etcd                      1                   e43a7a67672f1       etcd-functional-911502
	472eb48e2a576       6a23fa8fd2b78       2 minutes ago        Running             kindnet-cni               1                   b400f9b4bc923       kindnet-7r2rg
	1e1c55f6d316e       24a140c548c07       2 minutes ago        Running             kube-proxy                1                   c900cfd22280f       kube-proxy-l59dx
	381c4c4cdcc1a       2f6c962e7b831       2 minutes ago        Running             coredns                   1                   95b6f0353b091       coredns-7c65d6cfc9-6kw9d
	11757969f67eb       2f6c962e7b831       2 minutes ago        Exited              coredns                   0                   95b6f0353b091       coredns-7c65d6cfc9-6kw9d
	ce5a28d1cb5d2       6a23fa8fd2b78       3 minutes ago        Exited              kindnet-cni               0                   b400f9b4bc923       kindnet-7r2rg
	57c3cd94d0c59       24a140c548c07       3 minutes ago        Exited              kube-proxy                0                   c900cfd22280f       kube-proxy-l59dx
	492408bc37d38       27e3830e14027       3 minutes ago        Exited              etcd                      0                   e43a7a67672f1       etcd-functional-911502
	31265291ac7da       7f8aa378bb47d       3 minutes ago        Exited              kube-scheduler            0                   578f22ca4016c       kube-scheduler-functional-911502
	928f2c64d0a66       279f381cb3736       3 minutes ago        Exited              kube-controller-manager   0                   54de1abbce22f       kube-controller-manager-functional-911502
	
	
	==> containerd <==
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.070325855Z" level=error msg="ContainerStatus for \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\" failed" error="rpc error: code = NotFound desc = an error occurred when try to find container \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": not found"
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.079627605Z" level=info msg="CreateContainer within sandbox \"6b31db950b5d280a5feb49909dd8de1ece9d6371214eb94fe9fc239781f367ab\" for &ContainerMetadata{Name:kube-apiserver,Attempt:1,} returns container id \"da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12\""
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.080253645Z" level=info msg="StartContainer for \"da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12\""
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.223110276Z" level=info msg="StartContainer for \"da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12\" returns successfully"
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.551867636Z" level=info msg="CreateContainer within sandbox \"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c\" for container &ContainerMetadata{Name:storage-provisioner,Attempt:2,}"
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.568904337Z" level=info msg="CreateContainer within sandbox \"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c\" for &ContainerMetadata{Name:storage-provisioner,Attempt:2,} returns container id \"0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc\""
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.569729562Z" level=info msg="StartContainer for \"0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc\""
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.631010947Z" level=info msg="StartContainer for \"0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc\" returns successfully"
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.663906337Z" level=info msg="shim disconnected" id=0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc namespace=k8s.io
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.664118020Z" level=warning msg="cleaning up after shim disconnected" id=0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc namespace=k8s.io
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.664142586Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:48:48 functional-911502 containerd[3778]: time="2024-09-16T10:48:48.087297687Z" level=info msg="RemoveContainer for \"ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be\""
	Sep 16 10:48:48 functional-911502 containerd[3778]: time="2024-09-16T10:48:48.096408333Z" level=info msg="RemoveContainer for \"ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be\" returns successfully"
	Sep 16 10:49:01 functional-911502 containerd[3778]: time="2024-09-16T10:49:01.912835405Z" level=info msg="CreateContainer within sandbox \"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c\" for container &ContainerMetadata{Name:storage-provisioner,Attempt:3,}"
	Sep 16 10:49:01 functional-911502 containerd[3778]: time="2024-09-16T10:49:01.935474548Z" level=info msg="CreateContainer within sandbox \"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c\" for &ContainerMetadata{Name:storage-provisioner,Attempt:3,} returns container id \"494883dd75dacc85b892b0e05225fb2278a65feec328dc17976c6f49346f27fb\""
	Sep 16 10:49:01 functional-911502 containerd[3778]: time="2024-09-16T10:49:01.942508333Z" level=info msg="StartContainer for \"494883dd75dacc85b892b0e05225fb2278a65feec328dc17976c6f49346f27fb\""
	Sep 16 10:49:02 functional-911502 containerd[3778]: time="2024-09-16T10:49:02.043245100Z" level=info msg="StartContainer for \"494883dd75dacc85b892b0e05225fb2278a65feec328dc17976c6f49346f27fb\" returns successfully"
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.771343568Z" level=info msg="StopPodSandbox for \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\""
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.771455657Z" level=info msg="TearDown network for sandbox \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" successfully"
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.771471157Z" level=info msg="StopPodSandbox for \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" returns successfully"
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.772324288Z" level=info msg="RemovePodSandbox for \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\""
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.772366979Z" level=info msg="Forcibly stopping sandbox \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\""
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.772456357Z" level=info msg="TearDown network for sandbox \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" successfully"
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.777169287Z" level=warning msg="Failed to get podSandbox status for container event for sandboxID \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\": an error occurred when try to find sandbox: not found. Sending the event with nil podSandboxStatus."
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.777295785Z" level=info msg="RemovePodSandbox \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" returns successfully"
	
	
	==> coredns [11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39497 - 35637 "HINFO IN 756688810597303784.5152931065563193714. udp 56 false 512" NXDOMAIN qr,rd,ra 56 0.040061481s
	[INFO] SIGTERM: Shutting down servers then terminating
	[INFO] plugin/health: Going into lameduck mode for 5s
	
	
	==> coredns [381c4c4cdcc1a65a0e4d935f5449da5929d2fea3cf4e8c057860064146546ba0] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:57690 - 32762 "HINFO IN 7394603856605586965.6142061183963741332. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.012169588s
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: services is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "services" in API group "" at the cluster scope
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "services" in API group "" at the cluster scope
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: namespaces is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "namespaces" in API group "" at the cluster scope
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "namespaces" in API group "" at the cluster scope
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	
	
	==> describe nodes <==
	Name:               functional-911502
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-911502
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-911502
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_40_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:47:36 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-911502
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:50:45 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:36 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-911502
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 43a21049ac0d40628479cf884a8089e0
	  System UUID:                2830f6a5-4b63-46c5-b24a-468e4df19b79
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-6kw9d                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     3m7s
	  kube-system                 etcd-functional-911502                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         3m14s
	  kube-system                 kindnet-7r2rg                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m7s
	  kube-system                 kube-apiserver-functional-911502             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m4s
	  kube-system                 kube-controller-manager-functional-911502    200m (10%)    0 (0%)      0 (0%)           0 (0%)         3m12s
	  kube-system                 kube-proxy-l59dx                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m7s
	  kube-system                 kube-scheduler-functional-911502             100m (5%)     0 (0%)      0 (0%)           0 (0%)         3m12s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m7s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 3m6s                   kube-proxy       
	  Normal   Starting                 2m4s                   kube-proxy       
	  Normal   NodeAllocatableEnforced  3m12s                  kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 3m12s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m12s                  kubelet          Node functional-911502 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m12s                  kubelet          Node functional-911502 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m12s                  kubelet          Node functional-911502 status is now: NodeHasSufficientPID
	  Normal   Starting                 3m12s                  kubelet          Starting kubelet.
	  Normal   RegisteredNode           3m8s                   node-controller  Node functional-911502 event: Registered Node functional-911502 in Controller
	  Normal   Starting                 2m11s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 2m11s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  2m10s (x8 over 2m10s)  kubelet          Node functional-911502 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    2m10s (x7 over 2m10s)  kubelet          Node functional-911502 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m10s (x7 over 2m10s)  kubelet          Node functional-911502 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  2m10s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           2m1s                   node-controller  Node functional-911502 event: Registered Node functional-911502 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [1a427a607f52143d7ababbbe77d9ccd5fb21bed4f47e6ea656a489787066bdd5] <==
	{"level":"info","ts":"2024-09-16T10:48:38.695375Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-09-16T10:48:38.695629Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.695854Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.695783Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:38.699522Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:48:38.699870Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:48:38.700575Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.701150Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.700996Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:48:40.180057Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180110Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180142Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180157Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180164Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180195Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180204Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.183853Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-911502 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:40.183916Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:40.184260Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:40.185178Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:40.186391Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:40.187443Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:40.188457Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:40.194734Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:40.194782Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	
	
	==> etcd [492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31] <==
	{"level":"info","ts":"2024-09-16T10:47:33.255286Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.255359Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.262769Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.270884Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-911502 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:47:33.271109Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.271402Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.272620Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.283805Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.271431Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284172Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284977Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.289591Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.306735Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306879Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306916Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.344113Z","caller":"osutil/interrupt_unix.go:64","msg":"received signal; shutting down","signal":"terminated"}
	{"level":"info","ts":"2024-09-16T10:48:38.344168Z","caller":"embed/etcd.go:377","msg":"closing etcd server","name":"functional-911502","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	{"level":"warn","ts":"2024-09-16T10:48:38.344265Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.344295Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.345876Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.345916Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"info","ts":"2024-09-16T10:48:38.345965Z","caller":"etcdserver/server.go:1521","msg":"skipped leadership transfer for single voting member cluster","local-member-id":"aec36adc501070cc","current-leader-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:48:38.347534Z","caller":"embed/etcd.go:581","msg":"stopping serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.347628Z","caller":"embed/etcd.go:586","msg":"stopped serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.347663Z","caller":"embed/etcd.go:379","msg":"closed etcd server","name":"functional-911502","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	
	
	==> kernel <==
	 10:50:51 up 1 day, 14:33,  0 users,  load average: 0.61, 1.00, 1.26
	Linux functional-911502 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [472eb48e2a57659caeaf99025beaec2f96e01b98d7d3d7676515ac24fb61fb58] <==
	I0916 10:48:50.321308       1 controller.go:374] Syncing nftables rules
	I0916 10:48:59.220364       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:59.220423       1 main.go:299] handling current node
	I0916 10:49:09.220620       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:09.220654       1 main.go:299] handling current node
	I0916 10:49:19.222817       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:19.222857       1 main.go:299] handling current node
	I0916 10:49:29.219698       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:29.219737       1 main.go:299] handling current node
	I0916 10:49:39.219237       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:39.219276       1 main.go:299] handling current node
	I0916 10:49:49.219131       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:49.219171       1 main.go:299] handling current node
	I0916 10:49:59.219970       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:59.220080       1 main.go:299] handling current node
	I0916 10:50:09.224203       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:09.224418       1 main.go:299] handling current node
	I0916 10:50:19.219828       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:19.219867       1 main.go:299] handling current node
	I0916 10:50:29.219398       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:29.219439       1 main.go:299] handling current node
	I0916 10:50:39.228234       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:39.228270       1 main.go:299] handling current node
	I0916 10:50:49.219663       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:49.219706       1 main.go:299] handling current node
	
	
	==> kindnet [ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b] <==
	I0916 10:47:45.041351       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:47:45.041663       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:47:45.041804       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:47:45.041819       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:47:45.041830       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:47:45.520963       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:47:45.521152       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:47:45.521205       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:47:45.722171       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:47:45.722199       1 metrics.go:61] Registering metrics
	I0916 10:47:45.722266       1 controller.go:374] Syncing nftables rules
	I0916 10:47:55.524822       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:47:55.524859       1 main.go:299] handling current node
	I0916 10:48:05.526968       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:05.527019       1 main.go:299] handling current node
	I0916 10:48:15.528186       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:15.528219       1 main.go:299] handling current node
	I0916 10:48:25.523558       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:25.523675       1 main.go:299] handling current node
	
	
	==> kube-apiserver [95188fff801b22ea7d5c57a472a58be0bc02010422f1867802de90863ce56801] <==
	I0916 10:48:42.839848       1 options.go:228] external host was not specified, using 192.168.49.2
	I0916 10:48:42.855248       1 server.go:142] Version: v1.31.1
	I0916 10:48:42.855626       1 server.go:144] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	E0916 10:48:42.856037       1 run.go:72] "command failed" err="failed to create listener: failed to listen on 0.0.0.0:8441: listen tcp 0.0.0.0:8441: bind: address already in use"
	
	
	==> kube-apiserver [da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12] <==
	I0916 10:48:47.150778       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:48:47.150982       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 10:48:47.151288       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 10:48:47.151407       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:48:47.151562       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:48:47.157426       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:48:47.157660       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:48:47.159024       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 10:48:47.159224       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:48:47.159303       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:48:47.159367       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:48:47.159432       1 cache.go:39] Caches are synced for autoregister controller
	I0916 10:48:47.189320       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:48:47.189540       1 policy_source.go:224] refreshing policies
	I0916 10:48:47.191266       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:48:47.230910       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:48:47.961268       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	W0916 10:48:48.300845       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2]
	I0916 10:48:48.302392       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:48:48.308164       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 10:48:48.837004       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:48:48.966544       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:48:48.985830       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:48:49.056843       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:48:49.065696       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	
	
	==> kube-controller-manager [928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a] <==
	I0916 10:47:43.260717       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:47:43.260741       1 shared_informer.go:320] Caches are synced for legacy-service-account-token-cleaner
	I0916 10:47:43.260789       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 10:47:43.264947       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:47:43.270925       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:43.274016       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:47:43.289621       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 10:47:43.309403       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:47:43.755815       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810781       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810815       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:47:43.822097       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:44.310052       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="384.204702ms"
	I0916 10:47:44.362819       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="52.71544ms"
	I0916 10:47:44.426732       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="63.859952ms"
	I0916 10:47:44.459758       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="32.950043ms"
	I0916 10:47:44.479101       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="19.296296ms"
	I0916 10:47:44.479182       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.386µs"
	I0916 10:47:46.277218       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="54.999µs"
	I0916 10:47:46.284221       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="47.499µs"
	I0916 10:47:46.289165       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.016µs"
	I0916 10:47:49.776695       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:56.302801       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="57.165µs"
	I0916 10:47:57.316220       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="18.588165ms"
	I0916 10:47:57.316368       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="102.169µs"
	
	
	==> kube-controller-manager [d954d9e91e01c0d0330d1084c35e9e29fd31db11cc4bc7b4efdc63e3c17afd45] <==
	I0916 10:48:50.420809       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 10:48:50.420721       1 shared_informer.go:320] Caches are synced for validatingadmissionpolicy-status
	I0916 10:48:50.421043       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="99.405µs"
	I0916 10:48:50.423833       1 shared_informer.go:320] Caches are synced for TTL
	I0916 10:48:50.426496       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 10:48:50.431094       1 shared_informer.go:320] Caches are synced for TTL after finished
	I0916 10:48:50.433168       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:48:50.436096       1 shared_informer.go:320] Caches are synced for ReplicationController
	I0916 10:48:50.508138       1 shared_informer.go:320] Caches are synced for HPA
	I0916 10:48:50.570351       1 shared_informer.go:320] Caches are synced for expand
	I0916 10:48:50.594635       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:48:50.603270       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 10:48:50.620872       1 shared_informer.go:320] Caches are synced for ephemeral
	I0916 10:48:50.621193       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 10:48:50.621458       1 shared_informer.go:320] Caches are synced for PVC protection
	I0916 10:48:50.638042       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:50.660352       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:48:50.681442       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:51.072854       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:51.089681       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:51.090358       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:48:56.739270       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="40.969915ms"
	I0916 10:48:56.739344       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="41.895µs"
	I0916 10:49:06.732627       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="25.090829ms"
	I0916 10:49:06.733069       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="63.655µs"
	
	
	==> kube-proxy [1e1c55f6d316ecfe21daf226fdfd7a0c46b596f2904f3881ac5d62f21f9aa385] <==
	I0916 10:48:28.645183       1 server_linux.go:66] "Using iptables proxy"
	E0916 10:48:28.721587       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:29.873863       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:32.256620       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:36.493031       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	I0916 10:48:47.226267       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:47.226420       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:47.297852       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:47.298129       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:47.300753       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:47.301480       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:47.301624       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:47.312238       1 config.go:199] "Starting service config controller"
	I0916 10:48:47.312282       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:47.312370       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:47.312419       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:47.317887       1 config.go:328] "Starting node config controller"
	I0916 10:48:47.317941       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:47.413200       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:47.413370       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:47.418734       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6] <==
	I0916 10:47:44.794046       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:47:44.895525       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:47:44.895614       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:47:44.916570       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:47:44.916637       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:47:44.918597       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:47:44.919425       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:47:44.919452       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:47:44.925419       1 config.go:199] "Starting service config controller"
	I0916 10:47:44.925472       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:47:44.925521       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:47:44.925531       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:47:44.928903       1 config.go:328] "Starting node config controller"
	I0916 10:47:44.928927       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:47:45.030179       1 shared_informer.go:320] Caches are synced for node config
	I0916 10:47:45.030253       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:47:45.030287       1 shared_informer.go:320] Caches are synced for endpoint slice config
	
	
	==> kube-scheduler [16a7dfc9e01198cab582eac129b2a3162c14e01f4ade815bbd0695fd67b02c4c] <==
	I0916 10:48:43.411981       1 serving.go:386] Generated self-signed cert in-memory
	W0916 10:48:47.054124       1 requestheader_controller.go:196] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0916 10:48:47.054270       1 authentication.go:370] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0916 10:48:47.054329       1 authentication.go:371] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0916 10:48:47.054367       1 authentication.go:372] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0916 10:48:47.137658       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:47.137797       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:47.143443       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:47.143739       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:47.146739       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:47.146832       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:47.247771       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kube-scheduler [31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44] <==
	E0916 10:47:37.419493       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419534       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:47:37.419548       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419590       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 10:47:37.419607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419645       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419660       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419704       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419719       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422428       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:47:37.422472       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422530       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:47:37.422547       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422779       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 10:47:37.422805       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	W0916 10:47:37.422862       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:47:37.422883       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424481       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.424516       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424588       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:47:37.424607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424692       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:47:37.424724       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	I0916 10:47:38.910602       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	E0916 10:48:38.401394       1 run.go:72] "command failed" err="finished without leader elect"
	
	
	==> kubelet <==
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.041619    4699 status_manager.go:851] "Failed to get status for pod" podUID="c7ab8017ca620f2ba7e026f4cdb427a2" pod="kube-system/etcd-functional-911502" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/etcd-functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.041817    4699 status_manager.go:851] "Failed to get status for pod" podUID="0082f76f53cc9a35311f900de9a4ce8a" pod="kube-system/kube-apiserver-functional-911502" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.041988    4699 status_manager.go:851] "Failed to get status for pod" podUID="26c4a2e985a1c721e0411e5d9497a35b" pod="kube-system/kube-controller-manager-functional-911502" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-controller-manager-functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.042149    4699 status_manager.go:851] "Failed to get status for pod" podUID="69dae9ff35c780f43a15f539d6f19e46" pod="kube-system/kube-scheduler-functional-911502" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.042319    4699 status_manager.go:851] "Failed to get status for pod" podUID="72a26843-9f97-4121-91f0-3cb389048315" pod="kube-system/kube-proxy-l59dx" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-proxy-l59dx\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.052916    4699 scope.go:117] "RemoveContainer" containerID="a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.064122    4699 scope.go:117] "RemoveContainer" containerID="dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.069838    4699 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = an error occurred when try to find container \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\": not found" containerID="dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.069900    4699 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"containerd","ID":"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa"} err="failed to get container status \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\": rpc error: code = NotFound desc = an error occurred when try to find container \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\": not found"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.069984    4699 scope.go:117] "RemoveContainer" containerID="a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.070544    4699 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = an error occurred when try to find container \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": not found" containerID="a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.070591    4699 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"containerd","ID":"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"} err="failed to get container status \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": rpc error: code = NotFound desc = an error occurred when try to find container \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": not found"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: W0916 10:48:44.086224    4699 reflector.go:561] object-"kube-system"/"coredns": failed to list *v1.ConfigMap: Get "https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/configmaps?fieldSelector=metadata.name%3Dcoredns&resourceVersion=447": dial tcp 192.168.49.2:8441: connect: connection refused
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.086285    4699 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"coredns\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/configmaps?fieldSelector=metadata.name%3Dcoredns&resourceVersion=447\": dial tcp 192.168.49.2:8441: connect: connection refused" logger="UnhandledError"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: W0916 10:48:44.257240    4699 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://control-plane.minikube.internal:8441/apis/node.k8s.io/v1/runtimeclasses?resourceVersion=447": dial tcp 192.168.49.2:8441: connect: connection refused
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.257307    4699 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://control-plane.minikube.internal:8441/apis/node.k8s.io/v1/runtimeclasses?resourceVersion=447\": dial tcp 192.168.49.2:8441: connect: connection refused" logger="UnhandledError"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.911110    4699 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="846e81f7bcac6804cf5ef499ea5ac265" path="/var/lib/kubelet/pods/846e81f7bcac6804cf5ef499ea5ac265/volumes"
	Sep 16 10:48:45 functional-911502 kubelet[4699]: I0916 10:48:45.045335    4699 kubelet.go:1895] "Trying to delete pod" pod="kube-system/kube-apiserver-functional-911502" podUID="d399bd77-51dd-4ad3-90d4-6cf11e9e156e"
	Sep 16 10:48:47 functional-911502 kubelet[4699]: I0916 10:48:47.316886    4699 kubelet.go:1900] "Deleted mirror pod because it is outdated" pod="kube-system/kube-apiserver-functional-911502"
	Sep 16 10:48:47 functional-911502 kubelet[4699]: I0916 10:48:47.549623    4699 scope.go:117] "RemoveContainer" containerID="ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: I0916 10:48:48.083956    4699 scope.go:117] "RemoveContainer" containerID="ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: I0916 10:48:48.084957    4699 scope.go:117] "RemoveContainer" containerID="0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: E0916 10:48:48.085234    4699 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"storage-provisioner\" with CrashLoopBackOff: \"back-off 10s restarting failed container=storage-provisioner pod=storage-provisioner_kube-system(ecac562d-8318-4226-b5f1-61f2c76bb51b)\"" pod="kube-system/storage-provisioner" podUID="ecac562d-8318-4226-b5f1-61f2c76bb51b"
	Sep 16 10:48:51 functional-911502 kubelet[4699]: I0916 10:48:51.367187    4699 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-apiserver-functional-911502" podStartSLOduration=4.367170601 podStartE2EDuration="4.367170601s" podCreationTimestamp="2024-09-16 10:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:48:48.153684049 +0000 UTC m=+7.599562137" watchObservedRunningTime="2024-09-16 10:48:51.367170601 +0000 UTC m=+10.813048706"
	Sep 16 10:49:01 functional-911502 kubelet[4699]: I0916 10:49:01.908487    4699 scope.go:117] "RemoveContainer" containerID="0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc"
	
	
	==> storage-provisioner [0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc] <==
	I0916 10:48:47.636953       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	F0916 10:48:47.638558       1 main.go:39] error getting server version: Get "https://10.96.0.1:443/version?timeout=32s": dial tcp 10.96.0.1:443: connect: connection refused
	
	
	==> storage-provisioner [494883dd75dacc85b892b0e05225fb2278a65feec328dc17976c6f49346f27fb] <==
	I0916 10:49:02.043876       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:49:02.059730       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:49:02.059783       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:49:19.457349       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:49:19.457523       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-911502_0476ccd8-74e0-4006-af4c-04ca9058bfa5!
	I0916 10:49:19.457940       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"28a802e4-0156-4c92-adef-4d6f2592a206", APIVersion:"v1", ResourceVersion:"554", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-911502_0476ccd8-74e0-4006-af4c-04ca9058bfa5 became leader
	I0916 10:49:19.558377       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-911502_0476ccd8-74e0-4006-af4c-04ca9058bfa5!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-911502 -n functional-911502
helpers_test.go:261: (dbg) Run:  kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (487.841µs)
helpers_test.go:263: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/parallel/ServiceCmdConnect (2.55s)

                                                
                                    
x
+
TestFunctional/parallel/PersistentVolumeClaim (101.82s)

                                                
                                                
=== RUN   TestFunctional/parallel/PersistentVolumeClaim
=== PAUSE TestFunctional/parallel/PersistentVolumeClaim

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/PersistentVolumeClaim
functional_test_pvc_test.go:44: (dbg) TestFunctional/parallel/PersistentVolumeClaim: waiting 4m0s for pods matching "integration-test=storage-provisioner" in namespace "kube-system" ...
helpers_test.go:344: "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running
functional_test_pvc_test.go:44: (dbg) TestFunctional/parallel/PersistentVolumeClaim: integration-test=storage-provisioner healthy within 6.00379655s
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-911502 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-911502 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (528.965µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-911502 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-911502 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (512.899µs)
E0916 10:49:14.939377 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-911502 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-911502 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (572.706µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-911502 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-911502 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (450.483µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-911502 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-911502 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (404.666µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-911502 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-911502 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (544.62µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-911502 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-911502 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (388.264µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-911502 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-911502 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (513.884µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-911502 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-911502 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (551.513µs)
E0916 10:49:55.901676 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-911502 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-911502 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (390.865µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-911502 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-911502 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (471.989µs)
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-911502 get storageclass -o=json
functional_test_pvc_test.go:49: (dbg) Non-zero exit: kubectl --context functional-911502 get storageclass -o=json: fork/exec /usr/local/bin/kubectl: exec format error (514.409µs)
functional_test_pvc_test.go:65: failed to check for storage class: fork/exec /usr/local/bin/kubectl: exec format error
functional_test_pvc_test.go:69: (dbg) Run:  kubectl --context functional-911502 apply -f testdata/storage-provisioner/pvc.yaml
functional_test_pvc_test.go:69: (dbg) Non-zero exit: kubectl --context functional-911502 apply -f testdata/storage-provisioner/pvc.yaml: fork/exec /usr/local/bin/kubectl: exec format error (282.812µs)
functional_test_pvc_test.go:71: kubectl apply pvc.yaml failed: args "kubectl --context functional-911502 apply -f testdata/storage-provisioner/pvc.yaml": fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/parallel/PersistentVolumeClaim]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-911502
helpers_test.go:235: (dbg) docker inspect functional-911502:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1",
	        "Created": "2024-09-16T10:47:14.597354828Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2085675,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:47:14.7319597Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hostname",
	        "HostsPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hosts",
	        "LogPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1-json.log",
	        "Name": "/functional-911502",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-911502:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-911502",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/merged",
	                "UpperDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/diff",
	                "WorkDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "functional-911502",
	                "Source": "/var/lib/docker/volumes/functional-911502/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-911502",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-911502",
	                "name.minikube.sigs.k8s.io": "functional-911502",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "44bb4c6f2ec0f0eef04adb8f886d0e0de7d31ae50612de741bed0ee945b2b75e",
	            "SandboxKey": "/var/run/docker/netns/44bb4c6f2ec0",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40592"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40593"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40596"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40594"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40595"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-911502": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "8c4428adf23c812318456ac17bea5953b33d7961994dfc84c0ff82a45764b662",
	                    "EndpointID": "8b3cc6f2c9f87b61b7e755d7ecd320ed6313887dfb3deab9f4e0858aa1c9fe80",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-911502",
	                        "9bf795605895"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-911502 -n functional-911502
helpers_test.go:244: <<< TestFunctional/parallel/PersistentVolumeClaim FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/parallel/PersistentVolumeClaim]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 logs -n 25: (1.844006684s)
helpers_test.go:252: TestFunctional/parallel/PersistentVolumeClaim logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| Command |                                   Args                                   |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| ssh     | functional-911502                                                        | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | ssh sudo crictl rmi                                                      |                   |         |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| ssh     | functional-911502 ssh                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC |                     |
	|         | sudo crictl inspecti                                                     |                   |         |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| cache   | functional-911502 cache reload                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	| ssh     | functional-911502 ssh                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | sudo crictl inspecti                                                     |                   |         |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| cache   | delete                                                                   | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:3.1                                                |                   |         |         |                     |                     |
	| cache   | delete                                                                   | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | registry.k8s.io/pause:latest                                             |                   |         |         |                     |                     |
	| kubectl | functional-911502 kubectl --                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | --context functional-911502                                              |                   |         |         |                     |                     |
	|         | get pods                                                                 |                   |         |         |                     |                     |
	| start   | -p functional-911502                                                     | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:48 UTC | 16 Sep 24 10:48 UTC |
	|         | --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision |                   |         |         |                     |                     |
	|         | --wait=all                                                               |                   |         |         |                     |                     |
	| config  | functional-911502 config unset                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| cp      | functional-911502 cp                                                     | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | testdata/cp-test.txt                                                     |                   |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                 |                   |         |         |                     |                     |
	| config  | functional-911502 config get                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| config  | functional-911502 config set                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus 2                                                                   |                   |         |         |                     |                     |
	| config  | functional-911502 config get                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| config  | functional-911502 config unset                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| ssh     | functional-911502 ssh -n                                                 | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-911502 sudo cat                                               |                   |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                 |                   |         |         |                     |                     |
	| config  | functional-911502 config get                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | cpus                                                                     |                   |         |         |                     |                     |
	| ssh     | functional-911502 ssh echo                                               | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | hello                                                                    |                   |         |         |                     |                     |
	| cp      | functional-911502 cp                                                     | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-911502:/home/docker/cp-test.txt                               |                   |         |         |                     |                     |
	|         | /tmp/TestFunctionalparallelCpCmd3382788966/001/cp-test.txt               |                   |         |         |                     |                     |
	| ssh     | functional-911502 ssh cat                                                | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | /etc/hostname                                                            |                   |         |         |                     |                     |
	| ssh     | functional-911502 ssh -n                                                 | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-911502 sudo cat                                               |                   |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                 |                   |         |         |                     |                     |
	| tunnel  | functional-911502 tunnel                                                 | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | --alsologtostderr                                                        |                   |         |         |                     |                     |
	| tunnel  | functional-911502 tunnel                                                 | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | --alsologtostderr                                                        |                   |         |         |                     |                     |
	| cp      | functional-911502 cp                                                     | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | testdata/cp-test.txt                                                     |                   |         |         |                     |                     |
	|         | /tmp/does/not/exist/cp-test.txt                                          |                   |         |         |                     |                     |
	| tunnel  | functional-911502 tunnel                                                 | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC |                     |
	|         | --alsologtostderr                                                        |                   |         |         |                     |                     |
	| ssh     | functional-911502 ssh -n                                                 | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:49 UTC | 16 Sep 24 10:49 UTC |
	|         | functional-911502 sudo cat                                               |                   |         |         |                     |                     |
	|         | /tmp/does/not/exist/cp-test.txt                                          |                   |         |         |                     |                     |
	|---------|--------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:48:18
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:48:18.056558 2090441 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:48:18.056731 2090441 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:48:18.056736 2090441 out.go:358] Setting ErrFile to fd 2...
	I0916 10:48:18.056744 2090441 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:48:18.057119 2090441 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:48:18.057904 2090441 out.go:352] Setting JSON to false
	I0916 10:48:18.059612 2090441 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":138640,"bootTime":1726345058,"procs":201,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:48:18.060116 2090441 start.go:139] virtualization:  
	I0916 10:48:18.063723 2090441 out.go:177] * [functional-911502] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:48:18.066427 2090441 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:48:18.066506 2090441 notify.go:220] Checking for updates...
	I0916 10:48:18.071616 2090441 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:48:18.074552 2090441 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:48:18.077059 2090441 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:48:18.079403 2090441 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:48:18.081886 2090441 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:48:18.085175 2090441 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:48:18.085267 2090441 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:48:18.118972 2090441 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:48:18.119082 2090441 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:48:18.187814 2090441 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:52 OomKillDisable:true NGoroutines:84 SystemTime:2024-09-16 10:48:18.177439051 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:48:18.187915 2090441 docker.go:318] overlay module found
	I0916 10:48:18.190819 2090441 out.go:177] * Using the docker driver based on existing profile
	I0916 10:48:18.193361 2090441 start.go:297] selected driver: docker
	I0916 10:48:18.193371 2090441 start.go:901] validating driver "docker" against &{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountU
ID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:18.193484 2090441 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:48:18.193591 2090441 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:48:18.252173 2090441 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:52 OomKillDisable:true NGoroutines:84 SystemTime:2024-09-16 10:48:18.241918971 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:48:18.252721 2090441 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:48:18.252750 2090441 cni.go:84] Creating CNI manager for ""
	I0916 10:48:18.252804 2090441 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:48:18.252849 2090441 start.go:340] cluster config:
	{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local Containe
rRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUI
D:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:18.255882 2090441 out.go:177] * Starting "functional-911502" primary control-plane node in "functional-911502" cluster
	I0916 10:48:18.258466 2090441 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:48:18.261084 2090441 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:48:18.263652 2090441 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:48:18.263697 2090441 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:48:18.263702 2090441 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:48:18.263711 2090441 cache.go:56] Caching tarball of preloaded images
	I0916 10:48:18.263789 2090441 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:48:18.263812 2090441 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:48:18.263919 2090441 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/config.json ...
	W0916 10:48:18.282640 2090441 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:48:18.282651 2090441 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:48:18.282779 2090441 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:48:18.282796 2090441 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:48:18.282799 2090441 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:48:18.282806 2090441 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:48:18.282811 2090441 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:48:18.403176 2090441 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:48:18.403224 2090441 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:48:18.403279 2090441 start.go:360] acquireMachinesLock for functional-911502: {Name:mk182321dd921c9bc14d73d2af41d001efc879fd Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:48:18.403399 2090441 start.go:364] duration metric: took 82.79µs to acquireMachinesLock for "functional-911502"
	I0916 10:48:18.403430 2090441 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:48:18.403435 2090441 fix.go:54] fixHost starting: 
	I0916 10:48:18.403813 2090441 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:18.421104 2090441 fix.go:112] recreateIfNeeded on functional-911502: state=Running err=<nil>
	W0916 10:48:18.421131 2090441 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:48:18.424219 2090441 out.go:177] * Updating the running docker "functional-911502" container ...
	I0916 10:48:18.426570 2090441 machine.go:93] provisionDockerMachine start ...
	I0916 10:48:18.426707 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:18.443892 2090441 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:18.444150 2090441 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:48:18.444157 2090441 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:48:18.582615 2090441 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-911502
	
	I0916 10:48:18.582628 2090441 ubuntu.go:169] provisioning hostname "functional-911502"
	I0916 10:48:18.582729 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:18.601133 2090441 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:18.601387 2090441 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:48:18.601402 2090441 main.go:141] libmachine: About to run SSH command:
	sudo hostname functional-911502 && echo "functional-911502" | sudo tee /etc/hostname
	I0916 10:48:18.750562 2090441 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-911502
	
	I0916 10:48:18.750635 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:18.768271 2090441 main.go:141] libmachine: Using SSH client type: native
	I0916 10:48:18.768521 2090441 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40592 <nil> <nil>}
	I0916 10:48:18.768536 2090441 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sfunctional-911502' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 functional-911502/g' /etc/hosts;
				else 
					echo '127.0.1.1 functional-911502' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:48:18.907246 2090441 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:48:18.907263 2090441 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:48:18.907293 2090441 ubuntu.go:177] setting up certificates
	I0916 10:48:18.907302 2090441 provision.go:84] configureAuth start
	I0916 10:48:18.907364 2090441 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-911502
	I0916 10:48:18.927347 2090441 provision.go:143] copyHostCerts
	I0916 10:48:18.927406 2090441 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:48:18.927422 2090441 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:48:18.927486 2090441 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:48:18.927589 2090441 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:48:18.927593 2090441 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:48:18.927630 2090441 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:48:18.927703 2090441 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:48:18.927706 2090441 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:48:18.927733 2090441 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:48:18.927784 2090441 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.functional-911502 san=[127.0.0.1 192.168.49.2 functional-911502 localhost minikube]
	I0916 10:48:19.991257 2090441 provision.go:177] copyRemoteCerts
	I0916 10:48:19.991315 2090441 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:48:19.991358 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.029993 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.128591 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:48:20.156277 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1220 bytes)
	I0916 10:48:20.183185 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:48:20.209618 2090441 provision.go:87] duration metric: took 1.302302469s to configureAuth
	I0916 10:48:20.209635 2090441 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:48:20.209838 2090441 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:48:20.209844 2090441 machine.go:96] duration metric: took 1.783266636s to provisionDockerMachine
	I0916 10:48:20.209851 2090441 start.go:293] postStartSetup for "functional-911502" (driver="docker")
	I0916 10:48:20.209861 2090441 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:48:20.209924 2090441 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:48:20.209968 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.227087 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.323907 2090441 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:48:20.327142 2090441 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:48:20.327167 2090441 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:48:20.327179 2090441 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:48:20.327185 2090441 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:48:20.327194 2090441 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:48:20.327249 2090441 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:48:20.327327 2090441 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:48:20.327402 2090441 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/test/nested/copy/2063326/hosts -> hosts in /etc/test/nested/copy/2063326
	I0916 10:48:20.327447 2090441 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs /etc/test/nested/copy/2063326
	I0916 10:48:20.336043 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:48:20.360460 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/test/nested/copy/2063326/hosts --> /etc/test/nested/copy/2063326/hosts (40 bytes)
	I0916 10:48:20.385297 2090441 start.go:296] duration metric: took 175.431776ms for postStartSetup
	I0916 10:48:20.385378 2090441 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:48:20.385419 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.402295 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.495689 2090441 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:48:20.500214 2090441 fix.go:56] duration metric: took 2.096771088s for fixHost
	I0916 10:48:20.500228 2090441 start.go:83] releasing machines lock for "functional-911502", held for 2.096820746s
	I0916 10:48:20.500311 2090441 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" functional-911502
	I0916 10:48:20.517203 2090441 ssh_runner.go:195] Run: cat /version.json
	I0916 10:48:20.517249 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.517492 2090441 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:48:20.517559 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:20.534860 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.538812 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:20.630195 2090441 ssh_runner.go:195] Run: systemctl --version
	I0916 10:48:20.761370 2090441 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:48:20.765788 2090441 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:48:20.785230 2090441 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:48:20.785301 2090441 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:48:20.794254 2090441 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:48:20.794269 2090441 start.go:495] detecting cgroup driver to use...
	I0916 10:48:20.794301 2090441 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:48:20.794353 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:48:20.807440 2090441 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:48:20.819052 2090441 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:48:20.819108 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:48:20.832763 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:48:20.845443 2090441 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:48:20.973966 2090441 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:48:21.096206 2090441 docker.go:233] disabling docker service ...
	I0916 10:48:21.096283 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:48:21.120231 2090441 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:48:21.134274 2090441 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:48:21.245027 2090441 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:48:21.353592 2090441 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:48:21.366138 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:48:21.385216 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:48:21.397039 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:48:21.408473 2090441 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:48:21.408530 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:48:21.419890 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:48:21.430748 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:48:21.441177 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:48:21.452981 2090441 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:48:21.463398 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:48:21.474455 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:48:21.485837 2090441 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:48:21.495989 2090441 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:48:21.504821 2090441 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:48:21.514007 2090441 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:21.630033 2090441 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:48:21.929885 2090441 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:48:21.929958 2090441 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:48:21.934261 2090441 start.go:563] Will wait 60s for crictl version
	I0916 10:48:21.934371 2090441 ssh_runner.go:195] Run: which crictl
	I0916 10:48:21.937870 2090441 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:48:21.977142 2090441 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:48:21.977214 2090441 ssh_runner.go:195] Run: containerd --version
	I0916 10:48:21.999841 2090441 ssh_runner.go:195] Run: containerd --version
	I0916 10:48:22.030027 2090441 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:48:22.032799 2090441 cli_runner.go:164] Run: docker network inspect functional-911502 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:48:22.049379 2090441 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:48:22.056006 2090441 out.go:177]   - apiserver.enable-admission-plugins=NamespaceAutoProvision
	I0916 10:48:22.058597 2090441 kubeadm.go:883] updating cluster {Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA API
ServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: Mou
ntMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:48:22.058756 2090441 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:48:22.058847 2090441 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:22.096492 2090441 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:48:22.096505 2090441 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:48:22.096567 2090441 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:48:22.140078 2090441 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:48:22.140090 2090441 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:48:22.140096 2090441 kubeadm.go:934] updating node { 192.168.49.2 8441 v1.31.1 containerd true true} ...
	I0916 10:48:22.140203 2090441 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=functional-911502 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:48:22.140274 2090441 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:48:22.179208 2090441 extraconfig.go:124] Overwriting default enable-admission-plugins=NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota with user provided enable-admission-plugins=NamespaceAutoProvision for component apiserver
	I0916 10:48:22.179227 2090441 cni.go:84] Creating CNI manager for ""
	I0916 10:48:22.179236 2090441 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:48:22.179244 2090441 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:48:22.179266 2090441 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8441 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:functional-911502 NodeName:functional-911502 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceAutoProvision] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfi
gOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:48:22.179387 2090441 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8441
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "functional-911502"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceAutoProvision"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8441
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:48:22.179477 2090441 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:48:22.188765 2090441 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:48:22.188832 2090441 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 10:48:22.197782 2090441 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (321 bytes)
	I0916 10:48:22.216639 2090441 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:48:22.234768 2090441 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2021 bytes)
	I0916 10:48:22.253522 2090441 ssh_runner.go:195] Run: grep 192.168.49.2	control-plane.minikube.internal$ /etc/hosts
	I0916 10:48:22.257457 2090441 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:22.374849 2090441 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:22.388543 2090441 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502 for IP: 192.168.49.2
	I0916 10:48:22.388555 2090441 certs.go:194] generating shared ca certs ...
	I0916 10:48:22.388570 2090441 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:22.388723 2090441 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:48:22.388763 2090441 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:48:22.388769 2090441 certs.go:256] generating profile certs ...
	I0916 10:48:22.388849 2090441 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.key
	I0916 10:48:22.388891 2090441 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.key.03a9d60c
	I0916 10:48:22.388929 2090441 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.key
	I0916 10:48:22.389051 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:48:22.389077 2090441 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:48:22.389085 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:48:22.389109 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:48:22.389129 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:48:22.389149 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:48:22.389190 2090441 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:48:22.389803 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:48:22.417755 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:48:22.444790 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:48:22.469829 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:48:22.494601 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:48:22.519153 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:48:22.545011 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:48:22.569691 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1679 bytes)
	I0916 10:48:22.595014 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:48:22.619318 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:48:22.644259 2090441 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:48:22.668772 2090441 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:48:22.686564 2090441 ssh_runner.go:195] Run: openssl version
	I0916 10:48:22.692173 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:48:22.702850 2090441 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:22.706341 2090441 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:22.706407 2090441 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:48:22.713233 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:48:22.722032 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:48:22.731748 2090441 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:48:22.735358 2090441 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:48:22.735429 2090441 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:48:22.742406 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:48:22.751458 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:48:22.760959 2090441 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:48:22.764337 2090441 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:48:22.764391 2090441 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:48:22.771679 2090441 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:48:22.780664 2090441 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:48:22.784169 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:48:22.790852 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:48:22.797686 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:48:22.804722 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:48:22.811546 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:48:22.818077 2090441 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:48:22.824812 2090441 kubeadm.go:392] StartCluster: {Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APISer
verNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountM
Size:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:48:22.824893 2090441 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:48:22.824962 2090441 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:48:22.863779 2090441 cri.go:89] found id: "11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950"
	I0916 10:48:22.863790 2090441 cri.go:89] found id: "8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27"
	I0916 10:48:22.863793 2090441 cri.go:89] found id: "ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b"
	I0916 10:48:22.863796 2090441 cri.go:89] found id: "57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6"
	I0916 10:48:22.863798 2090441 cri.go:89] found id: "a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	I0916 10:48:22.863801 2090441 cri.go:89] found id: "492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31"
	I0916 10:48:22.863804 2090441 cri.go:89] found id: "31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44"
	I0916 10:48:22.863812 2090441 cri.go:89] found id: "928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a"
	I0916 10:48:22.863814 2090441 cri.go:89] found id: ""
	I0916 10:48:22.863867 2090441 ssh_runner.go:195] Run: sudo runc --root /run/containerd/runc/k8s.io list -f json
	I0916 10:48:22.896045 2090441 cri.go:116] JSON = [{"ociVersion":"1.0.2-dev","id":"11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","pid":2200,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950/rootfs","created":"2024-09-16T10:47:56.240222491Z","annotations":{"io.kubernetes.cri.container-name":"coredns","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/coredns/coredns:v1.11.3","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"31265291ac7da492c3c
fad84540ba2b684cdf0abad82be5c56d392df7613dc44","pid":1422,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44/rootfs","created":"2024-09-16T10:47:32.746424926Z","annotations":{"io.kubernetes.cri.container-name":"kube-scheduler","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-scheduler:v1.31.1","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","pid":1960,"status":"running","bundle":"/run/containe
rd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c/rootfs","created":"2024-09-16T10:47:45.184280835Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_storage-provisioner_ecac562d-8318-4226-b5f1-61f2c76bb51b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6
b99ce6c31","pid":1486,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31/rootfs","created":"2024-09-16T10:47:32.919386537Z","annotations":{"io.kubernetes.cri.container-name":"etcd","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/etcd:3.5.15-0","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","pid":1315,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e9713de05
1f556893ed4577eb8b5d9d38835da8d64517b7a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a/rootfs","created":"2024-09-16T10:47:32.575723929Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"256","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-apiserver-functional-911502_846e81f7bcac6804cf5ef499ea5ac265","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","pid":1316,"status":"running","bun
dle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f/rootfs","created":"2024-09-16T10:47:32.562785558Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"204","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-controller-manager-functional-911502_26c4a2e985a1c721e0411e5d9497a35b","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id"
:"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","pid":1338,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7/rootfs","created":"2024-09-16T10:47:32.61203341Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-scheduler-functional-911502_69dae9ff35c780f43a15f539d6f19e46","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-scheduler-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid"
:"69dae9ff35c780f43a15f539d6f19e46"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","pid":1792,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6/rootfs","created":"2024-09-16T10:47:44.701696235Z","annotations":{"io.kubernetes.cri.container-name":"kube-proxy","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-proxy:v1.31.1","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"8aede76947864ca07593bc24b939a29faf7
bb7dd85244f30f18f232f3ec1ea27","pid":2092,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27/rootfs","created":"2024-09-16T10:47:45.396938875Z","annotations":{"io.kubernetes.cri.container-name":"storage-provisioner","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"gcr.io/k8s-minikube/storage-provisioner:v5","io.kubernetes.cri.sandbox-id":"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c","io.kubernetes.cri.sandbox-name":"storage-provisioner","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ecac562d-8318-4226-b5f1-61f2c76bb51b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","pid":1399,"status":"running","bundle":"/run/containerd/io.containerd
.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a/rootfs","created":"2024-09-16T10:47:32.722647206Z","annotations":{"io.kubernetes.cri.container-name":"kube-controller-manager","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-controller-manager:v1.31.1","io.kubernetes.cri.sandbox-id":"54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f","io.kubernetes.cri.sandbox-name":"kube-controller-manager-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"26c4a2e985a1c721e0411e5d9497a35b"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","pid":2166,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3e
e6fc0070983ddfd","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd/rootfs","created":"2024-09-16T10:47:56.156755788Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_coredns-7c65d6cfc9-6kw9d_072167c7-fa1a-463e-a957-91ea24020387","io.kubernetes.cri.sandbox-memory":"178257920","io.kubernetes.cri.sandbox-name":"coredns-7c65d6cfc9-6kw9d","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"072167c7-fa1a-463e-a957-91ea24020387"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","pid":1479,"status":"running","bundle":"/run/containerd/io
.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19/rootfs","created":"2024-09-16T10:47:32.854949973Z","annotations":{"io.kubernetes.cri.container-name":"kube-apiserver","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"registry.k8s.io/kube-apiserver:v1.31.1","io.kubernetes.cri.sandbox-id":"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a","io.kubernetes.cri.sandbox-name":"kube-apiserver-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"846e81f7bcac6804cf5ef499ea5ac265"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","pid":1768,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e"
,"rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e/rootfs","created":"2024-09-16T10:47:44.62156803Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"10000","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kindnet-7r2rg_ab52a601-e0fe-4f60-a202-477487da9bb2","io.kubernetes.cri.sandbox-memory":"52428800","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","pid":1733,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/c
900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1/rootfs","created":"2024-09-16T10:47:44.525242585Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"2","io.kubernetes.cri.sandbox-id":"c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_kube-proxy-l59dx_72a26843-9f97-4121-91f0-3cb389048315","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"kube-proxy-l59dx","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"72a26843-9f97-4121-91f0-3cb389048315"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","pid":1835,"status":"running","bundl
e":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b/rootfs","created":"2024-09-16T10:47:44.827883907Z","annotations":{"io.kubernetes.cri.container-name":"kindnet-cni","io.kubernetes.cri.container-type":"container","io.kubernetes.cri.image-name":"docker.io/kindest/kindnetd:v20240813-c6f155d6","io.kubernetes.cri.sandbox-id":"b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e","io.kubernetes.cri.sandbox-name":"kindnet-7r2rg","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"ab52a601-e0fe-4f60-a202-477487da9bb2"},"owner":"root"},{"ociVersion":"1.0.2-dev","id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","pid":1295,"status":"running","bundle":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedb
e08802f86e","rootfs":"/run/containerd/io.containerd.runtime.v2.task/k8s.io/e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e/rootfs","created":"2024-09-16T10:47:32.534113117Z","annotations":{"io.kubernetes.cri.container-type":"sandbox","io.kubernetes.cri.sandbox-cpu-period":"100000","io.kubernetes.cri.sandbox-cpu-quota":"0","io.kubernetes.cri.sandbox-cpu-shares":"102","io.kubernetes.cri.sandbox-id":"e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e","io.kubernetes.cri.sandbox-log-directory":"/var/log/pods/kube-system_etcd-functional-911502_c7ab8017ca620f2ba7e026f4cdb427a2","io.kubernetes.cri.sandbox-memory":"0","io.kubernetes.cri.sandbox-name":"etcd-functional-911502","io.kubernetes.cri.sandbox-namespace":"kube-system","io.kubernetes.cri.sandbox-uid":"c7ab8017ca620f2ba7e026f4cdb427a2"},"owner":"root"}]
	I0916 10:48:22.896348 2090441 cri.go:126] list returned 16 containers
	I0916 10:48:22.896356 2090441 cri.go:129] container: {ID:11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 Status:running}
	I0916 10:48:22.896377 2090441 cri.go:135] skipping {11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 running}: state = "running", want "paused"
	I0916 10:48:22.896384 2090441 cri.go:129] container: {ID:31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 Status:running}
	I0916 10:48:22.896389 2090441 cri.go:135] skipping {31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 running}: state = "running", want "paused"
	I0916 10:48:22.896394 2090441 cri.go:129] container: {ID:334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c Status:running}
	I0916 10:48:22.896399 2090441 cri.go:131] skipping 334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c - not in ps
	I0916 10:48:22.896404 2090441 cri.go:129] container: {ID:492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 Status:running}
	I0916 10:48:22.896409 2090441 cri.go:135] skipping {492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 running}: state = "running", want "paused"
	I0916 10:48:22.896414 2090441 cri.go:129] container: {ID:51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a Status:running}
	I0916 10:48:22.896418 2090441 cri.go:131] skipping 51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a - not in ps
	I0916 10:48:22.896421 2090441 cri.go:129] container: {ID:54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f Status:running}
	I0916 10:48:22.896424 2090441 cri.go:131] skipping 54de1abbce22fcdde92a55b76e3d1b48997f1126edcbda81fab884dc8ac0359f - not in ps
	I0916 10:48:22.896427 2090441 cri.go:129] container: {ID:578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7 Status:running}
	I0916 10:48:22.896430 2090441 cri.go:131] skipping 578f22ca4016c09e359b02c955543ee2ef28a9104d51c3a385d7c351fd9c9cd7 - not in ps
	I0916 10:48:22.896433 2090441 cri.go:129] container: {ID:57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 Status:running}
	I0916 10:48:22.896438 2090441 cri.go:135] skipping {57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 running}: state = "running", want "paused"
	I0916 10:48:22.896442 2090441 cri.go:129] container: {ID:8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 Status:running}
	I0916 10:48:22.896447 2090441 cri.go:135] skipping {8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 running}: state = "running", want "paused"
	I0916 10:48:22.896451 2090441 cri.go:129] container: {ID:928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a Status:running}
	I0916 10:48:22.896455 2090441 cri.go:135] skipping {928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a running}: state = "running", want "paused"
	I0916 10:48:22.896459 2090441 cri.go:129] container: {ID:95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd Status:running}
	I0916 10:48:22.896464 2090441 cri.go:131] skipping 95b6f0353b09135399ee98532686d8ecd2bd23a479b890b3ee6fc0070983ddfd - not in ps
	I0916 10:48:22.896466 2090441 cri.go:129] container: {ID:a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 Status:running}
	I0916 10:48:22.896470 2090441 cri.go:135] skipping {a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 running}: state = "running", want "paused"
	I0916 10:48:22.896474 2090441 cri.go:129] container: {ID:b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e Status:running}
	I0916 10:48:22.896478 2090441 cri.go:131] skipping b400f9b4bc92361d4d3b0d6f93426a0ec3622d952d656f0badbacd7f891ab78e - not in ps
	I0916 10:48:22.896480 2090441 cri.go:129] container: {ID:c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1 Status:running}
	I0916 10:48:22.896484 2090441 cri.go:131] skipping c900cfd22280fcc5fccd5132381b8983bc57dff3a39f79ab7eda53862cb3e2c1 - not in ps
	I0916 10:48:22.896491 2090441 cri.go:129] container: {ID:ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b Status:running}
	I0916 10:48:22.896497 2090441 cri.go:135] skipping {ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b running}: state = "running", want "paused"
	I0916 10:48:22.896502 2090441 cri.go:129] container: {ID:e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e Status:running}
	I0916 10:48:22.896506 2090441 cri.go:131] skipping e43a7a67672f161f12fba0be75bd099342d5466a3f70a1d48ffedbe08802f86e - not in ps
	I0916 10:48:22.896558 2090441 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:48:22.906082 2090441 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 10:48:22.906092 2090441 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 10:48:22.906148 2090441 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 10:48:22.914917 2090441 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:48:22.915488 2090441 kubeconfig.go:125] found "functional-911502" server: "https://192.168.49.2:8441"
	I0916 10:48:22.916782 2090441 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 10:48:22.925899 2090441 kubeadm.go:640] detected kubeadm config drift (will reconfigure cluster from new /var/tmp/minikube/kubeadm.yaml):
	-- stdout --
	--- /var/tmp/minikube/kubeadm.yaml	2024-09-16 10:47:21.242945037 +0000
	+++ /var/tmp/minikube/kubeadm.yaml.new	2024-09-16 10:48:22.250558320 +0000
	@@ -22,7 +22,7 @@
	 apiServer:
	   certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	   extraArgs:
	-    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	+    enable-admission-plugins: "NamespaceAutoProvision"
	 controllerManager:
	   extraArgs:
	     allocate-node-cidrs: "true"
	
	-- /stdout --
	I0916 10:48:22.925911 2090441 kubeadm.go:1160] stopping kube-system containers ...
	I0916 10:48:22.925922 2090441 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:all Name: Namespaces:[kube-system]}
	I0916 10:48:22.925986 2090441 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:48:22.969464 2090441 cri.go:89] found id: "11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950"
	I0916 10:48:22.969476 2090441 cri.go:89] found id: "8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27"
	I0916 10:48:22.969480 2090441 cri.go:89] found id: "ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b"
	I0916 10:48:22.969488 2090441 cri.go:89] found id: "57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6"
	I0916 10:48:22.969491 2090441 cri.go:89] found id: "a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	I0916 10:48:22.969494 2090441 cri.go:89] found id: "492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31"
	I0916 10:48:22.969497 2090441 cri.go:89] found id: "31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44"
	I0916 10:48:22.969499 2090441 cri.go:89] found id: "928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a"
	I0916 10:48:22.969502 2090441 cri.go:89] found id: ""
	I0916 10:48:22.969506 2090441 cri.go:252] Stopping containers: [11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b 57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a]
	I0916 10:48:22.969568 2090441 ssh_runner.go:195] Run: which crictl
	I0916 10:48:22.973333 2090441 ssh_runner.go:195] Run: sudo /usr/bin/crictl stop --timeout=10 11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b 57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a
	I0916 10:48:38.633471 2090441 ssh_runner.go:235] Completed: sudo /usr/bin/crictl stop --timeout=10 11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950 8aede76947864ca07593bc24b939a29faf7bb7dd85244f30f18f232f3ec1ea27 ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b 57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6 a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19 492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31 31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44 928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a: (15.660102208s)
	I0916 10:48:38.633532 2090441 ssh_runner.go:195] Run: sudo systemctl stop kubelet
	I0916 10:48:38.739961 2090441 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:48:38.750010 2090441 kubeadm.go:157] found existing configuration files:
	-rw------- 1 root root 5651 Sep 16 10:47 /etc/kubernetes/admin.conf
	-rw------- 1 root root 5652 Sep 16 10:47 /etc/kubernetes/controller-manager.conf
	-rw------- 1 root root 2007 Sep 16 10:47 /etc/kubernetes/kubelet.conf
	-rw------- 1 root root 5600 Sep 16 10:47 /etc/kubernetes/scheduler.conf
	
	I0916 10:48:38.750102 2090441 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/admin.conf
	I0916 10:48:38.760985 2090441 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/kubelet.conf
	I0916 10:48:38.771244 2090441 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/controller-manager.conf
	I0916 10:48:38.781324 2090441 kubeadm.go:163] "https://control-plane.minikube.internal:8441" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/controller-manager.conf: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:48:38.781386 2090441 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:48:38.791843 2090441 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/scheduler.conf
	I0916 10:48:38.802301 2090441 kubeadm.go:163] "https://control-plane.minikube.internal:8441" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8441 /etc/kubernetes/scheduler.conf: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:48:38.802359 2090441 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:48:38.811917 2090441 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:48:38.825359 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase certs all --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:38.890532 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase kubeconfig all --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:40.317726 2090441 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase kubeconfig all --config /var/tmp/minikube/kubeadm.yaml": (1.427159279s)
	I0916 10:48:40.317743 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase kubelet-start --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:40.550624 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase control-plane all --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:40.692801 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase etcd local --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:40.850212 2090441 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:48:40.850281 2090441 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:48:40.873399 2090441 api_server.go:72] duration metric: took 23.195463ms to wait for apiserver process to appear ...
	I0916 10:48:40.873414 2090441 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:48:40.873442 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:42.076555 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	W0916 10:48:42.076579 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	I0916 10:48:42.076592 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:42.086814 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	W0916 10:48:42.086832 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	I0916 10:48:42.374183 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:42.383929 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:42.383952 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:42.874448 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:42.911477 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:42.911495 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:43.373607 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:43.373978 2090441 api_server.go:269] stopped: https://192.168.49.2:8441/healthz: Get "https://192.168.49.2:8441/healthz": dial tcp 192.168.49.2:8441: connect: connection refused
	I0916 10:48:43.873567 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:43.873950 2090441 api_server.go:269] stopped: https://192.168.49.2:8441/healthz: Get "https://192.168.49.2:8441/healthz": dial tcp 192.168.49.2:8441: connect: connection refused
	I0916 10:48:44.373541 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:46.967237 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	W0916 10:48:46.967252 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 403:
	{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/healthz\"","reason":"Forbidden","details":{},"code":403}
	I0916 10:48:46.967269 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:47.234554 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:47.234573 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:47.373856 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:47.381648 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:47.381668 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:47.874262 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:47.881898 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	W0916 10:48:47.881918 2090441 api_server.go:103] status: https://192.168.49.2:8441/healthz returned error 500:
	[+]ping ok
	[+]log ok
	[+]etcd ok
	[+]poststarthook/start-apiserver-admission-initializer ok
	[+]poststarthook/generic-apiserver-start-informers ok
	[+]poststarthook/priority-and-fairness-config-consumer ok
	[+]poststarthook/priority-and-fairness-filter ok
	[+]poststarthook/storage-object-count-tracker-hook ok
	[+]poststarthook/start-apiextensions-informers ok
	[+]poststarthook/start-apiextensions-controllers ok
	[+]poststarthook/crd-informer-synced ok
	[+]poststarthook/start-system-namespaces-controller ok
	[+]poststarthook/start-cluster-authentication-info-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-controller ok
	[+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
	[+]poststarthook/start-legacy-token-tracking-controller ok
	[+]poststarthook/start-service-ip-repair-controllers ok
	[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
	[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
	[+]poststarthook/priority-and-fairness-config-producer ok
	[+]poststarthook/bootstrap-controller ok
	[+]poststarthook/aggregator-reload-proxy-client-cert ok
	[+]poststarthook/start-kube-aggregator-informers ok
	[+]poststarthook/apiservice-status-local-available-controller ok
	[+]poststarthook/apiservice-status-remote-available-controller ok
	[+]poststarthook/apiservice-registration-controller ok
	[+]poststarthook/apiservice-discovery-controller ok
	[+]poststarthook/kube-apiserver-autoregistration ok
	[+]autoregister-completion ok
	[+]poststarthook/apiservice-openapi-controller ok
	[+]poststarthook/apiservice-openapiv3-controller ok
	healthz check failed
	I0916 10:48:48.373987 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:48.381643 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 200:
	ok
	I0916 10:48:48.395482 2090441 api_server.go:141] control plane version: v1.31.1
	I0916 10:48:48.395506 2090441 api_server.go:131] duration metric: took 7.522086105s to wait for apiserver health ...
	I0916 10:48:48.395515 2090441 cni.go:84] Creating CNI manager for ""
	I0916 10:48:48.395522 2090441 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:48:48.398238 2090441 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:48:48.400734 2090441 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:48:48.404728 2090441 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:48:48.404739 2090441 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:48:48.426027 2090441 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 10:48:48.844610 2090441 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:48:48.855743 2090441 system_pods.go:59] 8 kube-system pods found
	I0916 10:48:48.855765 2090441 system_pods.go:61] "coredns-7c65d6cfc9-6kw9d" [072167c7-fa1a-463e-a957-91ea24020387] Running
	I0916 10:48:48.855775 2090441 system_pods.go:61] "etcd-functional-911502" [9cccef26-ac83-485f-a6ae-2017f0ff645b] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 10:48:48.855785 2090441 system_pods.go:61] "kindnet-7r2rg" [ab52a601-e0fe-4f60-a202-477487da9bb2] Running
	I0916 10:48:48.855802 2090441 system_pods.go:61] "kube-apiserver-functional-911502" [49c666a4-4c79-4cfd-9ed9-f8307aea2147] Running / Ready:ContainersNotReady (containers with unready status: [kube-apiserver]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-apiserver])
	I0916 10:48:48.855809 2090441 system_pods.go:61] "kube-controller-manager-functional-911502" [60f8d5ef-11df-400e-bce8-00ed7502b8c7] Running / Ready:ContainersNotReady (containers with unready status: [kube-controller-manager]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-controller-manager])
	I0916 10:48:48.855818 2090441 system_pods.go:61] "kube-proxy-l59dx" [72a26843-9f97-4121-91f0-3cb389048315] Running
	I0916 10:48:48.855824 2090441 system_pods.go:61] "kube-scheduler-functional-911502" [7da8ecbb-189d-4ed2-bcbe-69ef483b67e8] Running / Ready:ContainersNotReady (containers with unready status: [kube-scheduler]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-scheduler])
	I0916 10:48:48.855835 2090441 system_pods.go:61] "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
	I0916 10:48:48.855842 2090441 system_pods.go:74] duration metric: took 11.220081ms to wait for pod list to return data ...
	I0916 10:48:48.855851 2090441 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:48:48.865905 2090441 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:48:48.865924 2090441 node_conditions.go:123] node cpu capacity is 2
	I0916 10:48:48.865935 2090441 node_conditions.go:105] duration metric: took 10.079887ms to run NodePressure ...
	I0916 10:48:48.865953 2090441 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init phase addon all --config /var/tmp/minikube/kubeadm.yaml"
	I0916 10:48:49.117268 2090441 kubeadm.go:724] waiting for restarted kubelet to initialise ...
	I0916 10:48:49.129456 2090441 kubeadm.go:739] kubelet initialised
	I0916 10:48:49.129467 2090441 kubeadm.go:740] duration metric: took 12.186746ms waiting for restarted kubelet to initialise ...
	I0916 10:48:49.129475 2090441 pod_ready.go:36] extra waiting up to 4m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:49.137397 2090441 pod_ready.go:79] waiting up to 4m0s for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:49.144966 2090441 pod_ready.go:93] pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:49.144978 2090441 pod_ready.go:82] duration metric: took 7.566607ms for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:49.144988 2090441 pod_ready.go:79] waiting up to 4m0s for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:51.150942 2090441 pod_ready.go:103] pod "etcd-functional-911502" in "kube-system" namespace has status "Ready":"False"
	I0916 10:48:52.151654 2090441 pod_ready.go:93] pod "etcd-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:52.151667 2090441 pod_ready.go:82] duration metric: took 3.006671097s for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:52.151678 2090441 pod_ready.go:79] waiting up to 4m0s for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:54.158468 2090441 pod_ready.go:103] pod "kube-apiserver-functional-911502" in "kube-system" namespace has status "Ready":"False"
	I0916 10:48:56.158238 2090441 pod_ready.go:93] pod "kube-apiserver-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.158254 2090441 pod_ready.go:82] duration metric: took 4.006566025s for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.158263 2090441 pod_ready.go:79] waiting up to 4m0s for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.165526 2090441 pod_ready.go:93] pod "kube-controller-manager-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.165538 2090441 pod_ready.go:82] duration metric: took 7.267917ms for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.165547 2090441 pod_ready.go:79] waiting up to 4m0s for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.171832 2090441 pod_ready.go:93] pod "kube-proxy-l59dx" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.171843 2090441 pod_ready.go:82] duration metric: took 6.290339ms for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.171853 2090441 pod_ready.go:79] waiting up to 4m0s for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.177575 2090441 pod_ready.go:93] pod "kube-scheduler-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.177587 2090441 pod_ready.go:82] duration metric: took 5.727669ms for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.177598 2090441 pod_ready.go:39] duration metric: took 7.048114784s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:56.177613 2090441 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:48:56.185221 2090441 ops.go:34] apiserver oom_adj: -16
	I0916 10:48:56.185232 2090441 kubeadm.go:597] duration metric: took 33.279135817s to restartPrimaryControlPlane
	I0916 10:48:56.185240 2090441 kubeadm.go:394] duration metric: took 33.360438088s to StartCluster
	I0916 10:48:56.185255 2090441 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:56.185318 2090441 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:48:56.185924 2090441 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:48:56.186127 2090441 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:48:56.186461 2090441 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:48:56.186501 2090441 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:48:56.186566 2090441 addons.go:69] Setting storage-provisioner=true in profile "functional-911502"
	I0916 10:48:56.186578 2090441 addons.go:234] Setting addon storage-provisioner=true in "functional-911502"
	W0916 10:48:56.186583 2090441 addons.go:243] addon storage-provisioner should already be in state true
	I0916 10:48:56.186603 2090441 host.go:66] Checking if "functional-911502" exists ...
	I0916 10:48:56.186753 2090441 addons.go:69] Setting default-storageclass=true in profile "functional-911502"
	I0916 10:48:56.186769 2090441 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "functional-911502"
	I0916 10:48:56.187108 2090441 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:56.187112 2090441 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:56.191212 2090441 out.go:177] * Verifying Kubernetes components...
	I0916 10:48:56.194523 2090441 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:48:56.225453 2090441 addons.go:234] Setting addon default-storageclass=true in "functional-911502"
	W0916 10:48:56.225463 2090441 addons.go:243] addon default-storageclass should already be in state true
	I0916 10:48:56.225486 2090441 host.go:66] Checking if "functional-911502" exists ...
	I0916 10:48:56.225903 2090441 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
	I0916 10:48:56.230244 2090441 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:48:56.235857 2090441 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:56.235869 2090441 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:48:56.235948 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:56.263017 2090441 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:56.263031 2090441 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:48:56.263097 2090441 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
	I0916 10:48:56.282120 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:56.301735 2090441 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
	I0916 10:48:56.353010 2090441 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:48:56.368505 2090441 node_ready.go:35] waiting up to 6m0s for node "functional-911502" to be "Ready" ...
	I0916 10:48:56.372081 2090441 node_ready.go:49] node "functional-911502" has status "Ready":"True"
	I0916 10:48:56.372091 2090441 node_ready.go:38] duration metric: took 3.567903ms for node "functional-911502" to be "Ready" ...
	I0916 10:48:56.372099 2090441 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:56.379540 2090441 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.407301 2090441 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:48:56.437317 2090441 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:48:56.555436 2090441 pod_ready.go:93] pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.555447 2090441 pod_ready.go:82] duration metric: took 175.892756ms for pod "coredns-7c65d6cfc9-6kw9d" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.555456 2090441 pod_ready.go:79] waiting up to 6m0s for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.956056 2090441 pod_ready.go:93] pod "etcd-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:56.956068 2090441 pod_ready.go:82] duration metric: took 400.604402ms for pod "etcd-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:56.956081 2090441 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:57.232685 2090441 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 10:48:57.235189 2090441 addons.go:510] duration metric: took 1.048680086s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0916 10:48:57.355420 2090441 pod_ready.go:93] pod "kube-apiserver-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:57.355430 2090441 pod_ready.go:82] duration metric: took 399.343184ms for pod "kube-apiserver-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:57.355440 2090441 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:57.755583 2090441 pod_ready.go:93] pod "kube-controller-manager-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:57.755595 2090441 pod_ready.go:82] duration metric: took 400.148824ms for pod "kube-controller-manager-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:57.755605 2090441 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:58.155248 2090441 pod_ready.go:93] pod "kube-proxy-l59dx" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:58.155259 2090441 pod_ready.go:82] duration metric: took 399.64819ms for pod "kube-proxy-l59dx" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:58.155269 2090441 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:58.555115 2090441 pod_ready.go:93] pod "kube-scheduler-functional-911502" in "kube-system" namespace has status "Ready":"True"
	I0916 10:48:58.555126 2090441 pod_ready.go:82] duration metric: took 399.851365ms for pod "kube-scheduler-functional-911502" in "kube-system" namespace to be "Ready" ...
	I0916 10:48:58.555137 2090441 pod_ready.go:39] duration metric: took 2.183026428s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:48:58.555151 2090441 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:48:58.555220 2090441 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:48:58.566810 2090441 api_server.go:72] duration metric: took 2.380658837s to wait for apiserver process to appear ...
	I0916 10:48:58.566825 2090441 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:48:58.566852 2090441 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8441/healthz ...
	I0916 10:48:58.574536 2090441 api_server.go:279] https://192.168.49.2:8441/healthz returned 200:
	ok
	I0916 10:48:58.575555 2090441 api_server.go:141] control plane version: v1.31.1
	I0916 10:48:58.575570 2090441 api_server.go:131] duration metric: took 8.738422ms to wait for apiserver health ...
	I0916 10:48:58.575586 2090441 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:48:58.759444 2090441 system_pods.go:59] 8 kube-system pods found
	I0916 10:48:58.759464 2090441 system_pods.go:61] "coredns-7c65d6cfc9-6kw9d" [072167c7-fa1a-463e-a957-91ea24020387] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 10:48:58.759469 2090441 system_pods.go:61] "etcd-functional-911502" [9cccef26-ac83-485f-a6ae-2017f0ff645b] Running
	I0916 10:48:58.759474 2090441 system_pods.go:61] "kindnet-7r2rg" [ab52a601-e0fe-4f60-a202-477487da9bb2] Running
	I0916 10:48:58.759478 2090441 system_pods.go:61] "kube-apiserver-functional-911502" [49c666a4-4c79-4cfd-9ed9-f8307aea2147] Running
	I0916 10:48:58.759482 2090441 system_pods.go:61] "kube-controller-manager-functional-911502" [60f8d5ef-11df-400e-bce8-00ed7502b8c7] Running
	I0916 10:48:58.759485 2090441 system_pods.go:61] "kube-proxy-l59dx" [72a26843-9f97-4121-91f0-3cb389048315] Running
	I0916 10:48:58.759488 2090441 system_pods.go:61] "kube-scheduler-functional-911502" [7da8ecbb-189d-4ed2-bcbe-69ef483b67e8] Running
	I0916 10:48:58.759493 2090441 system_pods.go:61] "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
	I0916 10:48:58.759499 2090441 system_pods.go:74] duration metric: took 183.907696ms to wait for pod list to return data ...
	I0916 10:48:58.759506 2090441 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:48:58.956246 2090441 default_sa.go:45] found service account: "default"
	I0916 10:48:58.956260 2090441 default_sa.go:55] duration metric: took 196.748952ms for default service account to be created ...
	I0916 10:48:58.956270 2090441 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:48:59.159500 2090441 system_pods.go:86] 8 kube-system pods found
	I0916 10:48:59.159531 2090441 system_pods.go:89] "coredns-7c65d6cfc9-6kw9d" [072167c7-fa1a-463e-a957-91ea24020387] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 10:48:59.159538 2090441 system_pods.go:89] "etcd-functional-911502" [9cccef26-ac83-485f-a6ae-2017f0ff645b] Running
	I0916 10:48:59.159543 2090441 system_pods.go:89] "kindnet-7r2rg" [ab52a601-e0fe-4f60-a202-477487da9bb2] Running
	I0916 10:48:59.159547 2090441 system_pods.go:89] "kube-apiserver-functional-911502" [49c666a4-4c79-4cfd-9ed9-f8307aea2147] Running
	I0916 10:48:59.159551 2090441 system_pods.go:89] "kube-controller-manager-functional-911502" [60f8d5ef-11df-400e-bce8-00ed7502b8c7] Running
	I0916 10:48:59.159559 2090441 system_pods.go:89] "kube-proxy-l59dx" [72a26843-9f97-4121-91f0-3cb389048315] Running
	I0916 10:48:59.159566 2090441 system_pods.go:89] "kube-scheduler-functional-911502" [7da8ecbb-189d-4ed2-bcbe-69ef483b67e8] Running
	I0916 10:48:59.159572 2090441 system_pods.go:89] "storage-provisioner" [ecac562d-8318-4226-b5f1-61f2c76bb51b] Running / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
	I0916 10:48:59.159582 2090441 system_pods.go:126] duration metric: took 203.306375ms to wait for k8s-apps to be running ...
	I0916 10:48:59.159588 2090441 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:48:59.159665 2090441 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:48:59.172329 2090441 system_svc.go:56] duration metric: took 12.729233ms WaitForService to wait for kubelet
	I0916 10:48:59.172348 2090441 kubeadm.go:582] duration metric: took 2.986201415s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:48:59.172365 2090441 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:48:59.356377 2090441 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:48:59.356393 2090441 node_conditions.go:123] node cpu capacity is 2
	I0916 10:48:59.356402 2090441 node_conditions.go:105] duration metric: took 184.03252ms to run NodePressure ...
	I0916 10:48:59.356414 2090441 start.go:241] waiting for startup goroutines ...
	I0916 10:48:59.356420 2090441 start.go:246] waiting for cluster config update ...
	I0916 10:48:59.356430 2090441 start.go:255] writing updated cluster config ...
	I0916 10:48:59.356743 2090441 ssh_runner.go:195] Run: rm -f paused
	I0916 10:48:59.365595 2090441 out.go:177] * Done! kubectl is now configured to use "functional-911502" cluster and "default" namespace by default
	E0916 10:48:59.367860 2090441 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	494883dd75dac       ba04bb24b9575       About a minute ago   Running             storage-provisioner       3                   334ec243859df       storage-provisioner
	0b22f9fb6da5e       ba04bb24b9575       2 minutes ago        Exited              storage-provisioner       2                   334ec243859df       storage-provisioner
	da9cb172fba10       d3f53a98c0a9d       2 minutes ago        Running             kube-apiserver            1                   6b31db950b5d2       kube-apiserver-functional-911502
	95188fff801b2       d3f53a98c0a9d       2 minutes ago        Exited              kube-apiserver            0                   6b31db950b5d2       kube-apiserver-functional-911502
	16a7dfc9e0119       7f8aa378bb47d       2 minutes ago        Running             kube-scheduler            1                   578f22ca4016c       kube-scheduler-functional-911502
	d954d9e91e01c       279f381cb3736       2 minutes ago        Running             kube-controller-manager   1                   54de1abbce22f       kube-controller-manager-functional-911502
	1a427a607f521       27e3830e14027       2 minutes ago        Running             etcd                      1                   e43a7a67672f1       etcd-functional-911502
	472eb48e2a576       6a23fa8fd2b78       2 minutes ago        Running             kindnet-cni               1                   b400f9b4bc923       kindnet-7r2rg
	1e1c55f6d316e       24a140c548c07       2 minutes ago        Running             kube-proxy                1                   c900cfd22280f       kube-proxy-l59dx
	381c4c4cdcc1a       2f6c962e7b831       2 minutes ago        Running             coredns                   1                   95b6f0353b091       coredns-7c65d6cfc9-6kw9d
	11757969f67eb       2f6c962e7b831       2 minutes ago        Exited              coredns                   0                   95b6f0353b091       coredns-7c65d6cfc9-6kw9d
	ce5a28d1cb5d2       6a23fa8fd2b78       3 minutes ago        Exited              kindnet-cni               0                   b400f9b4bc923       kindnet-7r2rg
	57c3cd94d0c59       24a140c548c07       3 minutes ago        Exited              kube-proxy                0                   c900cfd22280f       kube-proxy-l59dx
	492408bc37d38       27e3830e14027       3 minutes ago        Exited              etcd                      0                   e43a7a67672f1       etcd-functional-911502
	31265291ac7da       7f8aa378bb47d       3 minutes ago        Exited              kube-scheduler            0                   578f22ca4016c       kube-scheduler-functional-911502
	928f2c64d0a66       279f381cb3736       3 minutes ago        Exited              kube-controller-manager   0                   54de1abbce22f       kube-controller-manager-functional-911502
	
	
	==> containerd <==
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.070325855Z" level=error msg="ContainerStatus for \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\" failed" error="rpc error: code = NotFound desc = an error occurred when try to find container \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": not found"
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.079627605Z" level=info msg="CreateContainer within sandbox \"6b31db950b5d280a5feb49909dd8de1ece9d6371214eb94fe9fc239781f367ab\" for &ContainerMetadata{Name:kube-apiserver,Attempt:1,} returns container id \"da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12\""
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.080253645Z" level=info msg="StartContainer for \"da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12\""
	Sep 16 10:48:44 functional-911502 containerd[3778]: time="2024-09-16T10:48:44.223110276Z" level=info msg="StartContainer for \"da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12\" returns successfully"
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.551867636Z" level=info msg="CreateContainer within sandbox \"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c\" for container &ContainerMetadata{Name:storage-provisioner,Attempt:2,}"
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.568904337Z" level=info msg="CreateContainer within sandbox \"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c\" for &ContainerMetadata{Name:storage-provisioner,Attempt:2,} returns container id \"0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc\""
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.569729562Z" level=info msg="StartContainer for \"0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc\""
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.631010947Z" level=info msg="StartContainer for \"0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc\" returns successfully"
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.663906337Z" level=info msg="shim disconnected" id=0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc namespace=k8s.io
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.664118020Z" level=warning msg="cleaning up after shim disconnected" id=0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc namespace=k8s.io
	Sep 16 10:48:47 functional-911502 containerd[3778]: time="2024-09-16T10:48:47.664142586Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:48:48 functional-911502 containerd[3778]: time="2024-09-16T10:48:48.087297687Z" level=info msg="RemoveContainer for \"ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be\""
	Sep 16 10:48:48 functional-911502 containerd[3778]: time="2024-09-16T10:48:48.096408333Z" level=info msg="RemoveContainer for \"ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be\" returns successfully"
	Sep 16 10:49:01 functional-911502 containerd[3778]: time="2024-09-16T10:49:01.912835405Z" level=info msg="CreateContainer within sandbox \"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c\" for container &ContainerMetadata{Name:storage-provisioner,Attempt:3,}"
	Sep 16 10:49:01 functional-911502 containerd[3778]: time="2024-09-16T10:49:01.935474548Z" level=info msg="CreateContainer within sandbox \"334ec243859dfdf8acd2c8a75f0b465044008e1af77d6605f50bdc7b4b4cce4c\" for &ContainerMetadata{Name:storage-provisioner,Attempt:3,} returns container id \"494883dd75dacc85b892b0e05225fb2278a65feec328dc17976c6f49346f27fb\""
	Sep 16 10:49:01 functional-911502 containerd[3778]: time="2024-09-16T10:49:01.942508333Z" level=info msg="StartContainer for \"494883dd75dacc85b892b0e05225fb2278a65feec328dc17976c6f49346f27fb\""
	Sep 16 10:49:02 functional-911502 containerd[3778]: time="2024-09-16T10:49:02.043245100Z" level=info msg="StartContainer for \"494883dd75dacc85b892b0e05225fb2278a65feec328dc17976c6f49346f27fb\" returns successfully"
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.771343568Z" level=info msg="StopPodSandbox for \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\""
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.771455657Z" level=info msg="TearDown network for sandbox \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" successfully"
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.771471157Z" level=info msg="StopPodSandbox for \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" returns successfully"
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.772324288Z" level=info msg="RemovePodSandbox for \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\""
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.772366979Z" level=info msg="Forcibly stopping sandbox \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\""
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.772456357Z" level=info msg="TearDown network for sandbox \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" successfully"
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.777169287Z" level=warning msg="Failed to get podSandbox status for container event for sandboxID \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\": an error occurred when try to find sandbox: not found. Sending the event with nil podSandboxStatus."
	Sep 16 10:49:40 functional-911502 containerd[3778]: time="2024-09-16T10:49:40.777295785Z" level=info msg="RemovePodSandbox \"51fb442d9d3e0fc9e9713de051f556893ed4577eb8b5d9d38835da8d64517b7a\" returns successfully"
	
	
	==> coredns [11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39497 - 35637 "HINFO IN 756688810597303784.5152931065563193714. udp 56 false 512" NXDOMAIN qr,rd,ra 56 0.040061481s
	[INFO] SIGTERM: Shutting down servers then terminating
	[INFO] plugin/health: Going into lameduck mode for 5s
	
	
	==> coredns [381c4c4cdcc1a65a0e4d935f5449da5929d2fea3cf4e8c057860064146546ba0] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:57690 - 32762 "HINFO IN 7394603856605586965.6142061183963741332. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.012169588s
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: services is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "services" in API group "" at the cluster scope
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "services" in API group "" at the cluster scope
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: namespaces is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "namespaces" in API group "" at the cluster scope
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "namespaces" in API group "" at the cluster scope
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	
	
	==> describe nodes <==
	Name:               functional-911502
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-911502
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-911502
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_40_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:47:36 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-911502
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:50:45 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:36 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-911502
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 43a21049ac0d40628479cf884a8089e0
	  System UUID:                2830f6a5-4b63-46c5-b24a-468e4df19b79
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-6kw9d                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     3m4s
	  kube-system                 etcd-functional-911502                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         3m11s
	  kube-system                 kindnet-7r2rg                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m4s
	  kube-system                 kube-apiserver-functional-911502             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m1s
	  kube-system                 kube-controller-manager-functional-911502    200m (10%)    0 (0%)      0 (0%)           0 (0%)         3m9s
	  kube-system                 kube-proxy-l59dx                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m4s
	  kube-system                 kube-scheduler-functional-911502             100m (5%)     0 (0%)      0 (0%)           0 (0%)         3m9s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m4s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                  From             Message
	  ----     ------                   ----                 ----             -------
	  Normal   Starting                 3m3s                 kube-proxy       
	  Normal   Starting                 2m1s                 kube-proxy       
	  Normal   NodeAllocatableEnforced  3m9s                 kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 3m9s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m9s                 kubelet          Node functional-911502 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m9s                 kubelet          Node functional-911502 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m9s                 kubelet          Node functional-911502 status is now: NodeHasSufficientPID
	  Normal   Starting                 3m9s                 kubelet          Starting kubelet.
	  Normal   RegisteredNode           3m5s                 node-controller  Node functional-911502 event: Registered Node functional-911502 in Controller
	  Normal   Starting                 2m8s                 kubelet          Starting kubelet.
	  Warning  CgroupV1                 2m8s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  2m7s (x8 over 2m7s)  kubelet          Node functional-911502 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    2m7s (x7 over 2m7s)  kubelet          Node functional-911502 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m7s (x7 over 2m7s)  kubelet          Node functional-911502 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  2m7s                 kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           118s                 node-controller  Node functional-911502 event: Registered Node functional-911502 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [1a427a607f52143d7ababbbe77d9ccd5fb21bed4f47e6ea656a489787066bdd5] <==
	{"level":"info","ts":"2024-09-16T10:48:38.695375Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-09-16T10:48:38.695629Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.695854Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.695783Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:38.699522Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:48:38.699870Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:48:38.700575Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.701150Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.700996Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:48:40.180057Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180110Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180142Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180157Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180164Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180195Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180204Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.183853Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-911502 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:40.183916Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:40.184260Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:40.185178Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:40.186391Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:40.187443Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:40.188457Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:40.194734Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:40.194782Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	
	
	==> etcd [492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31] <==
	{"level":"info","ts":"2024-09-16T10:47:33.255286Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.255359Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.262769Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.270884Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-911502 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:47:33.271109Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.271402Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.272620Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.283805Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.271431Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284172Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284977Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.289591Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.306735Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306879Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306916Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.344113Z","caller":"osutil/interrupt_unix.go:64","msg":"received signal; shutting down","signal":"terminated"}
	{"level":"info","ts":"2024-09-16T10:48:38.344168Z","caller":"embed/etcd.go:377","msg":"closing etcd server","name":"functional-911502","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	{"level":"warn","ts":"2024-09-16T10:48:38.344265Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.344295Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.345876Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.345916Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"info","ts":"2024-09-16T10:48:38.345965Z","caller":"etcdserver/server.go:1521","msg":"skipped leadership transfer for single voting member cluster","local-member-id":"aec36adc501070cc","current-leader-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:48:38.347534Z","caller":"embed/etcd.go:581","msg":"stopping serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.347628Z","caller":"embed/etcd.go:586","msg":"stopped serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.347663Z","caller":"embed/etcd.go:379","msg":"closed etcd server","name":"functional-911502","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	
	
	==> kernel <==
	 10:50:48 up 1 day, 14:33,  0 users,  load average: 0.58, 1.00, 1.26
	Linux functional-911502 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [472eb48e2a57659caeaf99025beaec2f96e01b98d7d3d7676515ac24fb61fb58] <==
	I0916 10:48:50.320990       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:48:50.321150       1 metrics.go:61] Registering metrics
	I0916 10:48:50.321308       1 controller.go:374] Syncing nftables rules
	I0916 10:48:59.220364       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:59.220423       1 main.go:299] handling current node
	I0916 10:49:09.220620       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:09.220654       1 main.go:299] handling current node
	I0916 10:49:19.222817       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:19.222857       1 main.go:299] handling current node
	I0916 10:49:29.219698       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:29.219737       1 main.go:299] handling current node
	I0916 10:49:39.219237       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:39.219276       1 main.go:299] handling current node
	I0916 10:49:49.219131       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:49.219171       1 main.go:299] handling current node
	I0916 10:49:59.219970       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:59.220080       1 main.go:299] handling current node
	I0916 10:50:09.224203       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:09.224418       1 main.go:299] handling current node
	I0916 10:50:19.219828       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:19.219867       1 main.go:299] handling current node
	I0916 10:50:29.219398       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:29.219439       1 main.go:299] handling current node
	I0916 10:50:39.228234       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:39.228270       1 main.go:299] handling current node
	
	
	==> kindnet [ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b] <==
	I0916 10:47:45.041351       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:47:45.041663       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:47:45.041804       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:47:45.041819       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:47:45.041830       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:47:45.520963       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:47:45.521152       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:47:45.521205       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:47:45.722171       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:47:45.722199       1 metrics.go:61] Registering metrics
	I0916 10:47:45.722266       1 controller.go:374] Syncing nftables rules
	I0916 10:47:55.524822       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:47:55.524859       1 main.go:299] handling current node
	I0916 10:48:05.526968       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:05.527019       1 main.go:299] handling current node
	I0916 10:48:15.528186       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:15.528219       1 main.go:299] handling current node
	I0916 10:48:25.523558       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:25.523675       1 main.go:299] handling current node
	
	
	==> kube-apiserver [95188fff801b22ea7d5c57a472a58be0bc02010422f1867802de90863ce56801] <==
	I0916 10:48:42.839848       1 options.go:228] external host was not specified, using 192.168.49.2
	I0916 10:48:42.855248       1 server.go:142] Version: v1.31.1
	I0916 10:48:42.855626       1 server.go:144] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	E0916 10:48:42.856037       1 run.go:72] "command failed" err="failed to create listener: failed to listen on 0.0.0.0:8441: listen tcp 0.0.0.0:8441: bind: address already in use"
	
	
	==> kube-apiserver [da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12] <==
	I0916 10:48:47.150778       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:48:47.150982       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 10:48:47.151288       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 10:48:47.151407       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:48:47.151562       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:48:47.157426       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:48:47.157660       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:48:47.159024       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 10:48:47.159224       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:48:47.159303       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:48:47.159367       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:48:47.159432       1 cache.go:39] Caches are synced for autoregister controller
	I0916 10:48:47.189320       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:48:47.189540       1 policy_source.go:224] refreshing policies
	I0916 10:48:47.191266       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:48:47.230910       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:48:47.961268       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	W0916 10:48:48.300845       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2]
	I0916 10:48:48.302392       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:48:48.308164       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 10:48:48.837004       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:48:48.966544       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:48:48.985830       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:48:49.056843       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:48:49.065696       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	
	
	==> kube-controller-manager [928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a] <==
	I0916 10:47:43.260717       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:47:43.260741       1 shared_informer.go:320] Caches are synced for legacy-service-account-token-cleaner
	I0916 10:47:43.260789       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 10:47:43.264947       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:47:43.270925       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:43.274016       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:47:43.289621       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 10:47:43.309403       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:47:43.755815       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810781       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810815       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:47:43.822097       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:44.310052       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="384.204702ms"
	I0916 10:47:44.362819       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="52.71544ms"
	I0916 10:47:44.426732       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="63.859952ms"
	I0916 10:47:44.459758       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="32.950043ms"
	I0916 10:47:44.479101       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="19.296296ms"
	I0916 10:47:44.479182       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.386µs"
	I0916 10:47:46.277218       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="54.999µs"
	I0916 10:47:46.284221       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="47.499µs"
	I0916 10:47:46.289165       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.016µs"
	I0916 10:47:49.776695       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:56.302801       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="57.165µs"
	I0916 10:47:57.316220       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="18.588165ms"
	I0916 10:47:57.316368       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="102.169µs"
	
	
	==> kube-controller-manager [d954d9e91e01c0d0330d1084c35e9e29fd31db11cc4bc7b4efdc63e3c17afd45] <==
	I0916 10:48:50.420809       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 10:48:50.420721       1 shared_informer.go:320] Caches are synced for validatingadmissionpolicy-status
	I0916 10:48:50.421043       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="99.405µs"
	I0916 10:48:50.423833       1 shared_informer.go:320] Caches are synced for TTL
	I0916 10:48:50.426496       1 shared_informer.go:320] Caches are synced for taint-eviction-controller
	I0916 10:48:50.431094       1 shared_informer.go:320] Caches are synced for TTL after finished
	I0916 10:48:50.433168       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:48:50.436096       1 shared_informer.go:320] Caches are synced for ReplicationController
	I0916 10:48:50.508138       1 shared_informer.go:320] Caches are synced for HPA
	I0916 10:48:50.570351       1 shared_informer.go:320] Caches are synced for expand
	I0916 10:48:50.594635       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:48:50.603270       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 10:48:50.620872       1 shared_informer.go:320] Caches are synced for ephemeral
	I0916 10:48:50.621193       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 10:48:50.621458       1 shared_informer.go:320] Caches are synced for PVC protection
	I0916 10:48:50.638042       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:50.660352       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:48:50.681442       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:48:51.072854       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:51.089681       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:48:51.090358       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:48:56.739270       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="40.969915ms"
	I0916 10:48:56.739344       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="41.895µs"
	I0916 10:49:06.732627       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="25.090829ms"
	I0916 10:49:06.733069       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="63.655µs"
	
	
	==> kube-proxy [1e1c55f6d316ecfe21daf226fdfd7a0c46b596f2904f3881ac5d62f21f9aa385] <==
	I0916 10:48:28.645183       1 server_linux.go:66] "Using iptables proxy"
	E0916 10:48:28.721587       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:29.873863       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:32.256620       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:36.493031       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	I0916 10:48:47.226267       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:47.226420       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:47.297852       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:47.298129       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:47.300753       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:47.301480       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:47.301624       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:47.312238       1 config.go:199] "Starting service config controller"
	I0916 10:48:47.312282       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:47.312370       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:47.312419       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:47.317887       1 config.go:328] "Starting node config controller"
	I0916 10:48:47.317941       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:47.413200       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:47.413370       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:47.418734       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6] <==
	I0916 10:47:44.794046       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:47:44.895525       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:47:44.895614       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:47:44.916570       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:47:44.916637       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:47:44.918597       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:47:44.919425       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:47:44.919452       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:47:44.925419       1 config.go:199] "Starting service config controller"
	I0916 10:47:44.925472       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:47:44.925521       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:47:44.925531       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:47:44.928903       1 config.go:328] "Starting node config controller"
	I0916 10:47:44.928927       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:47:45.030179       1 shared_informer.go:320] Caches are synced for node config
	I0916 10:47:45.030253       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:47:45.030287       1 shared_informer.go:320] Caches are synced for endpoint slice config
	
	
	==> kube-scheduler [16a7dfc9e01198cab582eac129b2a3162c14e01f4ade815bbd0695fd67b02c4c] <==
	I0916 10:48:43.411981       1 serving.go:386] Generated self-signed cert in-memory
	W0916 10:48:47.054124       1 requestheader_controller.go:196] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0916 10:48:47.054270       1 authentication.go:370] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0916 10:48:47.054329       1 authentication.go:371] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0916 10:48:47.054367       1 authentication.go:372] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0916 10:48:47.137658       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:47.137797       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:47.143443       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:47.143739       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:47.146739       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:47.146832       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:47.247771       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kube-scheduler [31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44] <==
	E0916 10:47:37.419493       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419534       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:47:37.419548       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419590       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 10:47:37.419607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419645       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419660       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419704       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419719       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422428       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:47:37.422472       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422530       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:47:37.422547       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422779       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 10:47:37.422805       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	W0916 10:47:37.422862       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:47:37.422883       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424481       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.424516       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424588       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:47:37.424607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424692       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:47:37.424724       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	I0916 10:47:38.910602       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	E0916 10:48:38.401394       1 run.go:72] "command failed" err="finished without leader elect"
	
	
	==> kubelet <==
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.041619    4699 status_manager.go:851] "Failed to get status for pod" podUID="c7ab8017ca620f2ba7e026f4cdb427a2" pod="kube-system/etcd-functional-911502" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/etcd-functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.041817    4699 status_manager.go:851] "Failed to get status for pod" podUID="0082f76f53cc9a35311f900de9a4ce8a" pod="kube-system/kube-apiserver-functional-911502" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.041988    4699 status_manager.go:851] "Failed to get status for pod" podUID="26c4a2e985a1c721e0411e5d9497a35b" pod="kube-system/kube-controller-manager-functional-911502" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-controller-manager-functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.042149    4699 status_manager.go:851] "Failed to get status for pod" podUID="69dae9ff35c780f43a15f539d6f19e46" pod="kube-system/kube-scheduler-functional-911502" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.042319    4699 status_manager.go:851] "Failed to get status for pod" podUID="72a26843-9f97-4121-91f0-3cb389048315" pod="kube-system/kube-proxy-l59dx" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-proxy-l59dx\": dial tcp 192.168.49.2:8441: connect: connection refused"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.052916    4699 scope.go:117] "RemoveContainer" containerID="a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.064122    4699 scope.go:117] "RemoveContainer" containerID="dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.069838    4699 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = an error occurred when try to find container \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\": not found" containerID="dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.069900    4699 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"containerd","ID":"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa"} err="failed to get container status \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\": rpc error: code = NotFound desc = an error occurred when try to find container \"dbb62da7a58f0d5794d6f2d8eae0e54165c4c9db20cd516853e8db2f91046aaa\": not found"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.069984    4699 scope.go:117] "RemoveContainer" containerID="a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.070544    4699 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = an error occurred when try to find container \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": not found" containerID="a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.070591    4699 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"containerd","ID":"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19"} err="failed to get container status \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": rpc error: code = NotFound desc = an error occurred when try to find container \"a27dc93745c627132e1a916d369da9f0946a0e606a9828186ba74feb5cb8da19\": not found"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: W0916 10:48:44.086224    4699 reflector.go:561] object-"kube-system"/"coredns": failed to list *v1.ConfigMap: Get "https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/configmaps?fieldSelector=metadata.name%3Dcoredns&resourceVersion=447": dial tcp 192.168.49.2:8441: connect: connection refused
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.086285    4699 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"coredns\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/configmaps?fieldSelector=metadata.name%3Dcoredns&resourceVersion=447\": dial tcp 192.168.49.2:8441: connect: connection refused" logger="UnhandledError"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: W0916 10:48:44.257240    4699 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://control-plane.minikube.internal:8441/apis/node.k8s.io/v1/runtimeclasses?resourceVersion=447": dial tcp 192.168.49.2:8441: connect: connection refused
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.257307    4699 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://control-plane.minikube.internal:8441/apis/node.k8s.io/v1/runtimeclasses?resourceVersion=447\": dial tcp 192.168.49.2:8441: connect: connection refused" logger="UnhandledError"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.911110    4699 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="846e81f7bcac6804cf5ef499ea5ac265" path="/var/lib/kubelet/pods/846e81f7bcac6804cf5ef499ea5ac265/volumes"
	Sep 16 10:48:45 functional-911502 kubelet[4699]: I0916 10:48:45.045335    4699 kubelet.go:1895] "Trying to delete pod" pod="kube-system/kube-apiserver-functional-911502" podUID="d399bd77-51dd-4ad3-90d4-6cf11e9e156e"
	Sep 16 10:48:47 functional-911502 kubelet[4699]: I0916 10:48:47.316886    4699 kubelet.go:1900] "Deleted mirror pod because it is outdated" pod="kube-system/kube-apiserver-functional-911502"
	Sep 16 10:48:47 functional-911502 kubelet[4699]: I0916 10:48:47.549623    4699 scope.go:117] "RemoveContainer" containerID="ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: I0916 10:48:48.083956    4699 scope.go:117] "RemoveContainer" containerID="ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: I0916 10:48:48.084957    4699 scope.go:117] "RemoveContainer" containerID="0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: E0916 10:48:48.085234    4699 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"storage-provisioner\" with CrashLoopBackOff: \"back-off 10s restarting failed container=storage-provisioner pod=storage-provisioner_kube-system(ecac562d-8318-4226-b5f1-61f2c76bb51b)\"" pod="kube-system/storage-provisioner" podUID="ecac562d-8318-4226-b5f1-61f2c76bb51b"
	Sep 16 10:48:51 functional-911502 kubelet[4699]: I0916 10:48:51.367187    4699 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-apiserver-functional-911502" podStartSLOduration=4.367170601 podStartE2EDuration="4.367170601s" podCreationTimestamp="2024-09-16 10:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:48:48.153684049 +0000 UTC m=+7.599562137" watchObservedRunningTime="2024-09-16 10:48:51.367170601 +0000 UTC m=+10.813048706"
	Sep 16 10:49:01 functional-911502 kubelet[4699]: I0916 10:49:01.908487    4699 scope.go:117] "RemoveContainer" containerID="0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc"
	
	
	==> storage-provisioner [0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc] <==
	I0916 10:48:47.636953       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	F0916 10:48:47.638558       1 main.go:39] error getting server version: Get "https://10.96.0.1:443/version?timeout=32s": dial tcp 10.96.0.1:443: connect: connection refused
	
	
	==> storage-provisioner [494883dd75dacc85b892b0e05225fb2278a65feec328dc17976c6f49346f27fb] <==
	I0916 10:49:02.043876       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:49:02.059730       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:49:02.059783       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:49:19.457349       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:49:19.457523       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-911502_0476ccd8-74e0-4006-af4c-04ca9058bfa5!
	I0916 10:49:19.457940       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"28a802e4-0156-4c92-adef-4d6f2592a206", APIVersion:"v1", ResourceVersion:"554", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-911502_0476ccd8-74e0-4006-af4c-04ca9058bfa5 became leader
	I0916 10:49:19.558377       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-911502_0476ccd8-74e0-4006-af4c-04ca9058bfa5!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-911502 -n functional-911502
helpers_test.go:261: (dbg) Run:  kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (468.231µs)
helpers_test.go:263: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/parallel/PersistentVolumeClaim (101.82s)

                                                
                                    
x
+
TestFunctional/parallel/NodeLabels (4.37s)

                                                
                                                
=== RUN   TestFunctional/parallel/NodeLabels
=== PAUSE TestFunctional/parallel/NodeLabels

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/NodeLabels
functional_test.go:219: (dbg) Run:  kubectl --context functional-911502 get nodes --output=go-template "--template='{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'"
functional_test.go:219: (dbg) Non-zero exit: kubectl --context functional-911502 get nodes --output=go-template "--template='{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'": fork/exec /usr/local/bin/kubectl: exec format error (523.131µs)
functional_test.go:221: failed to 'kubectl get nodes' with args "kubectl --context functional-911502 get nodes --output=go-template \"--template='{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'\"": fork/exec /usr/local/bin/kubectl: exec format error
functional_test.go:227: expected to have label "minikube.k8s.io/commit" in node labels but got : 
functional_test.go:227: expected to have label "minikube.k8s.io/version" in node labels but got : 
functional_test.go:227: expected to have label "minikube.k8s.io/updated_at" in node labels but got : 
functional_test.go:227: expected to have label "minikube.k8s.io/name" in node labels but got : 
functional_test.go:227: expected to have label "minikube.k8s.io/primary" in node labels but got : 
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestFunctional/parallel/NodeLabels]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect functional-911502
helpers_test.go:235: (dbg) docker inspect functional-911502:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1",
	        "Created": "2024-09-16T10:47:14.597354828Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2085675,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:47:14.7319597Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hostname",
	        "HostsPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/hosts",
	        "LogPath": "/var/lib/docker/containers/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1/9bf795605895fd5c1b49134d7c1008e3f99211793f2a8cc56474398d179b83c1-json.log",
	        "Name": "/functional-911502",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "functional-911502:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "functional-911502",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 4194304000,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 8388608000,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/merged",
	                "UpperDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/diff",
	                "WorkDir": "/var/lib/docker/overlay2/fa0f885b577d36f13bcf75ac83e0609eb49d3ba44740079b3159a4f1756f6217/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "functional-911502",
	                "Source": "/var/lib/docker/volumes/functional-911502/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "functional-911502",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8441/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "functional-911502",
	                "name.minikube.sigs.k8s.io": "functional-911502",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "44bb4c6f2ec0f0eef04adb8f886d0e0de7d31ae50612de741bed0ee945b2b75e",
	            "SandboxKey": "/var/run/docker/netns/44bb4c6f2ec0",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40592"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40593"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40596"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40594"
	                    }
	                ],
	                "8441/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40595"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "functional-911502": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "8c4428adf23c812318456ac17bea5953b33d7961994dfc84c0ff82a45764b662",
	                    "EndpointID": "8b3cc6f2c9f87b61b7e755d7ecd320ed6313887dfb3deab9f4e0858aa1c9fe80",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "functional-911502",
	                        "9bf795605895"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p functional-911502 -n functional-911502
helpers_test.go:244: <<< TestFunctional/parallel/NodeLabels FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/parallel/NodeLabels]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 logs -n 25: (3.260734034s)
helpers_test.go:252: TestFunctional/parallel/NodeLabels logs: 
-- stdout --
	
	==> Audit <==
	|-----------|-------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	|  Command  |                                  Args                                   |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|-----------|-------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| ssh       | functional-911502 ssh cat                                               | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	|           | /mount-9p/test-1726483856463003754                                      |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh mount |                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | grep 9p; ls -la /mount-9p; cat                                          |                   |         |         |                     |                     |
	|           | /mount-9p/pod-dates                                                     |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh sudo                                              | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:50 UTC |
	|           | umount -f /mount-9p                                                     |                   |         |         |                     |                     |
	| start     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | --dry-run --memory                                                      |                   |         |         |                     |                     |
	|           | 250MB --alsologtostderr                                                 |                   |         |         |                     |                     |
	|           | --driver=docker                                                         |                   |         |         |                     |                     |
	|           | --container-runtime=containerd                                          |                   |         |         |                     |                     |
	| start     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | --dry-run --memory                                                      |                   |         |         |                     |                     |
	|           | 250MB --alsologtostderr                                                 |                   |         |         |                     |                     |
	|           | --driver=docker                                                         |                   |         |         |                     |                     |
	|           | --container-runtime=containerd                                          |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh findmnt                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | -T /mount-9p | grep 9p                                                  |                   |         |         |                     |                     |
	| mount     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdspecific-port819290008/001:/mount-9p |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1 --port 46464                                     |                   |         |         |                     |                     |
	| start     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | --dry-run --alsologtostderr                                             |                   |         |         |                     |                     |
	|           | -v=1 --driver=docker                                                    |                   |         |         |                     |                     |
	|           | --container-runtime=containerd                                          |                   |         |         |                     |                     |
	| dashboard | --url --port 36195                                                      | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC |                     |
	|           | -p functional-911502                                                    |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh findmnt                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:50 UTC | 16 Sep 24 10:51 UTC |
	|           | -T /mount-9p | grep 9p                                                  |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh -- ls                                             | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | -la /mount-9p                                                           |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh sudo                                              | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | umount -f /mount-9p                                                     |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh findmnt                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | -T /mount1                                                              |                   |         |         |                     |                     |
	| mount     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdVerifyCleanup3293230860/001:/mount2  |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| mount     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdVerifyCleanup3293230860/001:/mount1  |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| mount     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | /tmp/TestFunctionalparallelMountCmdVerifyCleanup3293230860/001:/mount3  |                   |         |         |                     |                     |
	|           | --alsologtostderr -v=1                                                  |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh findmnt                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | -T /mount1                                                              |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh findmnt                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | -T /mount2                                                              |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh findmnt                                           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | -T /mount3                                                              |                   |         |         |                     |                     |
	| mount     | -p functional-911502                                                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | --kill=true                                                             |                   |         |         |                     |                     |
	| license   |                                                                         | minikube          | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	| ssh       | functional-911502 ssh sudo                                              | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | systemctl is-active docker                                              |                   |         |         |                     |                     |
	| ssh       | functional-911502 ssh sudo                                              | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|           | systemctl is-active crio                                                |                   |         |         |                     |                     |
	| image     | functional-911502 image load --daemon                                   | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|           | kicbase/echo-server:functional-911502                                   |                   |         |         |                     |                     |
	|           | --alsologtostderr                                                       |                   |         |         |                     |                     |
	| image     | functional-911502 image ls                                              | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC |                     |
	|-----------|-------------------------------------------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:50:59
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:50:59.466349 2096800 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:50:59.466944 2096800 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:50:59.466996 2096800 out.go:358] Setting ErrFile to fd 2...
	I0916 10:50:59.467017 2096800 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:50:59.467723 2096800 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:50:59.468236 2096800 out.go:352] Setting JSON to false
	I0916 10:50:59.469291 2096800 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":138802,"bootTime":1726345058,"procs":208,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:50:59.469449 2096800 start.go:139] virtualization:  
	I0916 10:50:59.473685 2096800 out.go:177] * [functional-911502] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:50:59.476767 2096800 notify.go:220] Checking for updates...
	I0916 10:50:59.484797 2096800 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:50:59.487415 2096800 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:50:59.489703 2096800 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:50:59.492387 2096800 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:50:59.495107 2096800 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:50:59.498996 2096800 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:50:59.502035 2096800 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:50:59.502801 2096800 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:50:59.537313 2096800 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:50:59.537431 2096800 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:50:59.631678 2096800 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:48 OomKillDisable:true NGoroutines:71 SystemTime:2024-09-16 10:50:59.621432888 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:50:59.631792 2096800 docker.go:318] overlay module found
	I0916 10:50:59.634922 2096800 out.go:177] * Using the docker driver based on existing profile
	I0916 10:50:59.637406 2096800 start.go:297] selected driver: docker
	I0916 10:50:59.637430 2096800 start.go:901] validating driver "docker" against &{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:do
cker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:50:59.637566 2096800 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:50:59.637694 2096800 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:50:59.716778 2096800 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:48 OomKillDisable:true NGoroutines:71 SystemTime:2024-09-16 10:50:59.707082204 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:50:59.717242 2096800 cni.go:84] Creating CNI manager for ""
	I0916 10:50:59.717297 2096800 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:50:59.717351 2096800 start.go:340] cluster config:
	{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local Containe
rRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUI
D:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:50:59.720176 2096800 out.go:177] * dry-run validation complete!
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	494883dd75dac       ba04bb24b9575       2 minutes ago       Running             storage-provisioner       3                   334ec243859df       storage-provisioner
	0b22f9fb6da5e       ba04bb24b9575       2 minutes ago       Exited              storage-provisioner       2                   334ec243859df       storage-provisioner
	da9cb172fba10       d3f53a98c0a9d       2 minutes ago       Running             kube-apiserver            1                   6b31db950b5d2       kube-apiserver-functional-911502
	95188fff801b2       d3f53a98c0a9d       2 minutes ago       Exited              kube-apiserver            0                   6b31db950b5d2       kube-apiserver-functional-911502
	16a7dfc9e0119       7f8aa378bb47d       2 minutes ago       Running             kube-scheduler            1                   578f22ca4016c       kube-scheduler-functional-911502
	d954d9e91e01c       279f381cb3736       2 minutes ago       Running             kube-controller-manager   1                   54de1abbce22f       kube-controller-manager-functional-911502
	1a427a607f521       27e3830e14027       2 minutes ago       Running             etcd                      1                   e43a7a67672f1       etcd-functional-911502
	472eb48e2a576       6a23fa8fd2b78       2 minutes ago       Running             kindnet-cni               1                   b400f9b4bc923       kindnet-7r2rg
	1e1c55f6d316e       24a140c548c07       2 minutes ago       Running             kube-proxy                1                   c900cfd22280f       kube-proxy-l59dx
	381c4c4cdcc1a       2f6c962e7b831       2 minutes ago       Running             coredns                   1                   95b6f0353b091       coredns-7c65d6cfc9-6kw9d
	11757969f67eb       2f6c962e7b831       3 minutes ago       Exited              coredns                   0                   95b6f0353b091       coredns-7c65d6cfc9-6kw9d
	ce5a28d1cb5d2       6a23fa8fd2b78       3 minutes ago       Exited              kindnet-cni               0                   b400f9b4bc923       kindnet-7r2rg
	57c3cd94d0c59       24a140c548c07       3 minutes ago       Exited              kube-proxy                0                   c900cfd22280f       kube-proxy-l59dx
	492408bc37d38       27e3830e14027       3 minutes ago       Exited              etcd                      0                   e43a7a67672f1       etcd-functional-911502
	31265291ac7da       7f8aa378bb47d       3 minutes ago       Exited              kube-scheduler            0                   578f22ca4016c       kube-scheduler-functional-911502
	928f2c64d0a66       279f381cb3736       3 minutes ago       Exited              kube-controller-manager   0                   54de1abbce22f       kube-controller-manager-functional-911502
	
	
	==> containerd <==
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.227679017Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:kubernetes-dashboard-695b96c756-mbl59,Uid:42bb249b-bfab-43de-8434-d26beb3b5dfd,Namespace:kubernetes-dashboard,Attempt:0,}"
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.278412803Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:dashboard-metrics-scraper-c5db448b4-z5ddp,Uid:982110d1-99e0-46cb-b2b3-45fc92464cf7,Namespace:kubernetes-dashboard,Attempt:0,}"
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.334400751Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.334531868Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.334564573Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.334772022Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.391055623Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.391453914Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.391614118Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.391995334Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.455234567Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:kubernetes-dashboard-695b96c756-mbl59,Uid:42bb249b-bfab-43de-8434-d26beb3b5dfd,Namespace:kubernetes-dashboard,Attempt:0,} returns sandbox id \"75ebd5d02de4da23e5dc320b62f8bd412b951e22817d0a1db054c8f711b8f2e5\""
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.457807942Z" level=info msg="PullImage \"docker.io/kubernetesui/dashboard:v2.7.0@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93\""
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.460221170Z" level=error msg="failed to decode hosts.toml" error="invalid `host` tree"
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.543971413Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:dashboard-metrics-scraper-c5db448b4-z5ddp,Uid:982110d1-99e0-46cb-b2b3-45fc92464cf7,Namespace:kubernetes-dashboard,Attempt:0,} returns sandbox id \"c9e5b834275d0d2bc12590706c9bfdd4f603af24b058ca764b86e251b4ab0527\""
	Sep 16 10:51:03 functional-911502 containerd[3778]: time="2024-09-16T10:51:03.651872207Z" level=error msg="failed to decode hosts.toml" error="invalid `host` tree"
	Sep 16 10:51:07 functional-911502 containerd[3778]: time="2024-09-16T10:51:07.114876532Z" level=info msg="ImageCreate event name:\"docker.io/kicbase/echo-server:functional-911502\""
	Sep 16 10:51:07 functional-911502 containerd[3778]: time="2024-09-16T10:51:07.118937930Z" level=info msg="ImageCreate event name:\"sha256:ce2d2cda2d858fdaea84129deb86d18e5dbf1c548f230b79fdca74cc91729d17\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 10:51:07 functional-911502 containerd[3778]: time="2024-09-16T10:51:07.119323717Z" level=info msg="ImageUpdate event name:\"docker.io/kicbase/echo-server:functional-911502\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 10:51:08 functional-911502 containerd[3778]: time="2024-09-16T10:51:08.621801092Z" level=info msg="RemoveImage \"kicbase/echo-server:functional-911502\""
	Sep 16 10:51:08 functional-911502 containerd[3778]: time="2024-09-16T10:51:08.623556748Z" level=info msg="ImageDelete event name:\"docker.io/kicbase/echo-server:functional-911502\""
	Sep 16 10:51:08 functional-911502 containerd[3778]: time="2024-09-16T10:51:08.626966590Z" level=info msg="ImageDelete event name:\"sha256:ce2d2cda2d858fdaea84129deb86d18e5dbf1c548f230b79fdca74cc91729d17\""
	Sep 16 10:51:08 functional-911502 containerd[3778]: time="2024-09-16T10:51:08.635099970Z" level=info msg="RemoveImage \"kicbase/echo-server:functional-911502\" returns successfully"
	Sep 16 10:51:08 functional-911502 containerd[3778]: time="2024-09-16T10:51:08.986104271Z" level=info msg="ImageCreate event name:\"docker.io/kubernetesui/dashboard@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 10:51:08 functional-911502 containerd[3778]: time="2024-09-16T10:51:08.988852135Z" level=info msg="stop pulling image docker.io/kubernetesui/dashboard@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93: active requests=0, bytes read=74095421"
	Sep 16 10:51:08 functional-911502 containerd[3778]: time="2024-09-16T10:51:08.997832648Z" level=info msg="ImageCreate event name:\"sha256:20b332c9a70d8516d849d1ac23eff5800cbb2f263d379f0ec11ee908db6b25a8\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	
	
	==> coredns [11757969f67eb2558fce4fed04a5bb64075e77da314b8ca17a2f0ea0b6d90950] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:39497 - 35637 "HINFO IN 756688810597303784.5152931065563193714. udp 56 false 512" NXDOMAIN qr,rd,ra 56 0.040061481s
	[INFO] SIGTERM: Shutting down servers then terminating
	[INFO] plugin/health: Going into lameduck mode for 5s
	
	
	==> coredns [381c4c4cdcc1a65a0e4d935f5449da5929d2fea3cf4e8c057860064146546ba0] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:57690 - 32762 "HINFO IN 7394603856605586965.6142061183963741332. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.012169588s
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: services is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "services" in API group "" at the cluster scope
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "services" in API group "" at the cluster scope
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: namespaces is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "namespaces" in API group "" at the cluster scope
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User "system:serviceaccount:kube-system:coredns" cannot list resource "namespaces" in API group "" at the cluster scope
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: connect: connection refused
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	
	
	==> describe nodes <==
	Name:               functional-911502
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=functional-911502
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=functional-911502
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_47_40_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:47:36 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  functional-911502
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:51:05 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:33 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:48:42 +0000   Mon, 16 Sep 2024 10:47:36 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    functional-911502
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 43a21049ac0d40628479cf884a8089e0
	  System UUID:                2830f6a5-4b63-46c5-b24a-468e4df19b79
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (10 in total)
	  Namespace                   Name                                         CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                         ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-6kw9d                     100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     3m25s
	  kube-system                 etcd-functional-911502                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         3m32s
	  kube-system                 kindnet-7r2rg                                100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m25s
	  kube-system                 kube-apiserver-functional-911502             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m22s
	  kube-system                 kube-controller-manager-functional-911502    200m (10%)    0 (0%)      0 (0%)           0 (0%)         3m30s
	  kube-system                 kube-proxy-l59dx                             0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m25s
	  kube-system                 kube-scheduler-functional-911502             100m (5%)     0 (0%)      0 (0%)           0 (0%)         3m30s
	  kube-system                 storage-provisioner                          0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m25s
	  kubernetes-dashboard        dashboard-metrics-scraper-c5db448b4-z5ddp    0 (0%)        0 (0%)      0 (0%)           0 (0%)         8s
	  kubernetes-dashboard        kubernetes-dashboard-695b96c756-mbl59        0 (0%)        0 (0%)      0 (0%)           0 (0%)         8s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 3m24s                  kube-proxy       
	  Normal   Starting                 2m22s                  kube-proxy       
	  Normal   NodeAllocatableEnforced  3m30s                  kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 3m30s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m30s                  kubelet          Node functional-911502 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m30s                  kubelet          Node functional-911502 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m30s                  kubelet          Node functional-911502 status is now: NodeHasSufficientPID
	  Normal   Starting                 3m30s                  kubelet          Starting kubelet.
	  Normal   RegisteredNode           3m26s                  node-controller  Node functional-911502 event: Registered Node functional-911502 in Controller
	  Normal   Starting                 2m29s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 2m29s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  2m28s (x8 over 2m28s)  kubelet          Node functional-911502 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    2m28s (x7 over 2m28s)  kubelet          Node functional-911502 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m28s (x7 over 2m28s)  kubelet          Node functional-911502 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  2m28s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           2m19s                  node-controller  Node functional-911502 event: Registered Node functional-911502 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [1a427a607f52143d7ababbbe77d9ccd5fb21bed4f47e6ea656a489787066bdd5] <==
	{"level":"info","ts":"2024-09-16T10:48:38.695375Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"aec36adc501070cc","added-peer-peer-urls":["https://192.168.49.2:2380"]}
	{"level":"info","ts":"2024-09-16T10:48:38.695629Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.695854Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.695783Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:38.699522Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T10:48:38.699870Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"aec36adc501070cc","initial-advertise-peer-urls":["https://192.168.49.2:2380"],"listen-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.49.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T10:48:38.700575Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.701150Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.700996Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T10:48:40.180057Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180110Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180142Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:48:40.180157Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180164Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180195Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.180204Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:48:40.183853Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-911502 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:48:40.183916Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:40.184260Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:48:40.185178Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:40.186391Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:48:40.187443Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:48:40.188457Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:48:40.194734Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:48:40.194782Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	
	
	==> etcd [492408bc37d38a1d8712ef754136e8b589841b3096dee7a1fa2e6f6b99ce6c31] <==
	{"level":"info","ts":"2024-09-16T10:47:33.255286Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became leader at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.255359Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:47:33.262769Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.270884Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:functional-911502 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:47:33.271109Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.271402Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:47:33.272620Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.283805Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.271431Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284172Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:47:33.284977Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:47:33.289591Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:47:33.306735Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306879Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:47:33.306916Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T10:48:38.344113Z","caller":"osutil/interrupt_unix.go:64","msg":"received signal; shutting down","signal":"terminated"}
	{"level":"info","ts":"2024-09-16T10:48:38.344168Z","caller":"embed/etcd.go:377","msg":"closing etcd server","name":"functional-911502","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	{"level":"warn","ts":"2024-09-16T10:48:38.344265Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.344295Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 127.0.0.1:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.345876Z","caller":"embed/serve.go:212","msg":"stopping secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"warn","ts":"2024-09-16T10:48:38.345916Z","caller":"embed/serve.go:214","msg":"stopped secure grpc server due to error","error":"accept tcp 192.168.49.2:2379: use of closed network connection"}
	{"level":"info","ts":"2024-09-16T10:48:38.345965Z","caller":"etcdserver/server.go:1521","msg":"skipped leadership transfer for single voting member cluster","local-member-id":"aec36adc501070cc","current-leader-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:48:38.347534Z","caller":"embed/etcd.go:581","msg":"stopping serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.347628Z","caller":"embed/etcd.go:586","msg":"stopped serving peer traffic","address":"192.168.49.2:2380"}
	{"level":"info","ts":"2024-09-16T10:48:38.347663Z","caller":"embed/etcd.go:379","msg":"closed etcd server","name":"functional-911502","data-dir":"/var/lib/minikube/etcd","advertise-peer-urls":["https://192.168.49.2:2380"],"advertise-client-urls":["https://192.168.49.2:2379"]}
	
	
	==> kernel <==
	 10:51:09 up 1 day, 14:33,  0 users,  load average: 2.01, 1.28, 1.35
	Linux functional-911502 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [472eb48e2a57659caeaf99025beaec2f96e01b98d7d3d7676515ac24fb61fb58] <==
	I0916 10:49:09.220654       1 main.go:299] handling current node
	I0916 10:49:19.222817       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:19.222857       1 main.go:299] handling current node
	I0916 10:49:29.219698       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:29.219737       1 main.go:299] handling current node
	I0916 10:49:39.219237       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:39.219276       1 main.go:299] handling current node
	I0916 10:49:49.219131       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:49.219171       1 main.go:299] handling current node
	I0916 10:49:59.219970       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:49:59.220080       1 main.go:299] handling current node
	I0916 10:50:09.224203       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:09.224418       1 main.go:299] handling current node
	I0916 10:50:19.219828       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:19.219867       1 main.go:299] handling current node
	I0916 10:50:29.219398       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:29.219439       1 main.go:299] handling current node
	I0916 10:50:39.228234       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:39.228270       1 main.go:299] handling current node
	I0916 10:50:49.219663       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:49.219706       1 main.go:299] handling current node
	I0916 10:50:59.224567       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:50:59.224606       1 main.go:299] handling current node
	I0916 10:51:09.225102       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:51:09.225149       1 main.go:299] handling current node
	
	
	==> kindnet [ce5a28d1cb5d2f795fde647a3c386c48dd1884e1d03c8f60f078f03cffde690b] <==
	I0916 10:47:45.041351       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 10:47:45.041663       1 main.go:139] hostIP = 192.168.49.2
	podIP = 192.168.49.2
	I0916 10:47:45.041804       1 main.go:148] setting mtu 1500 for CNI 
	I0916 10:47:45.041819       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 10:47:45.041830       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 10:47:45.520963       1 controller.go:334] Starting controller kube-network-policies
	I0916 10:47:45.521152       1 controller.go:338] Waiting for informer caches to sync
	I0916 10:47:45.521205       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 10:47:45.722171       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:47:45.722199       1 metrics.go:61] Registering metrics
	I0916 10:47:45.722266       1 controller.go:374] Syncing nftables rules
	I0916 10:47:55.524822       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:47:55.524859       1 main.go:299] handling current node
	I0916 10:48:05.526968       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:05.527019       1 main.go:299] handling current node
	I0916 10:48:15.528186       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:15.528219       1 main.go:299] handling current node
	I0916 10:48:25.523558       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:48:25.523675       1 main.go:299] handling current node
	
	
	==> kube-apiserver [95188fff801b22ea7d5c57a472a58be0bc02010422f1867802de90863ce56801] <==
	I0916 10:48:42.839848       1 options.go:228] external host was not specified, using 192.168.49.2
	I0916 10:48:42.855248       1 server.go:142] Version: v1.31.1
	I0916 10:48:42.855626       1 server.go:144] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	E0916 10:48:42.856037       1 run.go:72] "command failed" err="failed to create listener: failed to listen on 0.0.0.0:8441: listen tcp 0.0.0.0:8441: bind: address already in use"
	
	
	==> kube-apiserver [da9cb172fba10c236aa8468446d2ed4a269aae7f05a719e0fdc4745f7eca8d12] <==
	I0916 10:48:47.151562       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:48:47.157426       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:48:47.157660       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:48:47.159024       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	I0916 10:48:47.159224       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:48:47.159303       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:48:47.159367       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:48:47.159432       1 cache.go:39] Caches are synced for autoregister controller
	I0916 10:48:47.189320       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:48:47.189540       1 policy_source.go:224] refreshing policies
	I0916 10:48:47.191266       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:48:47.230910       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:48:47.961268       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	W0916 10:48:48.300845       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2]
	I0916 10:48:48.302392       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:48:48.308164       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 10:48:48.837004       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:48:48.966544       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:48:48.985830       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:48:49.056843       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:48:49.065696       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 10:51:01.396849       1 controller.go:615] quota admission added evaluator for: namespaces
	I0916 10:51:01.558533       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0916 10:51:02.180634       1 alloc.go:330] "allocated clusterIPs" service="kubernetes-dashboard/kubernetes-dashboard" clusterIPs={"IPv4":"10.111.52.145"}
	I0916 10:51:02.217515       1 alloc.go:330] "allocated clusterIPs" service="kubernetes-dashboard/dashboard-metrics-scraper" clusterIPs={"IPv4":"10.107.16.51"}
	
	
	==> kube-controller-manager [928f2c64d0a660e0df1ccc73f1387a86a73d76492ee63bfd42843912ecc1fe0a] <==
	I0916 10:47:43.260717       1 shared_informer.go:320] Caches are synced for daemon sets
	I0916 10:47:43.260741       1 shared_informer.go:320] Caches are synced for legacy-service-account-token-cleaner
	I0916 10:47:43.260789       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0916 10:47:43.264947       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 10:47:43.270925       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:43.274016       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 10:47:43.289621       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 10:47:43.309403       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 10:47:43.755815       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810781       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 10:47:43.810815       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 10:47:43.822097       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:44.310052       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="384.204702ms"
	I0916 10:47:44.362819       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="52.71544ms"
	I0916 10:47:44.426732       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="63.859952ms"
	I0916 10:47:44.459758       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="32.950043ms"
	I0916 10:47:44.479101       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="19.296296ms"
	I0916 10:47:44.479182       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.386µs"
	I0916 10:47:46.277218       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="54.999µs"
	I0916 10:47:46.284221       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="47.499µs"
	I0916 10:47:46.289165       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="48.016µs"
	I0916 10:47:49.776695       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="functional-911502"
	I0916 10:47:56.302801       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="57.165µs"
	I0916 10:47:57.316220       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="18.588165ms"
	I0916 10:47:57.316368       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="102.169µs"
	
	
	==> kube-controller-manager [d954d9e91e01c0d0330d1084c35e9e29fd31db11cc4bc7b4efdc63e3c17afd45] <==
	I0916 10:51:01.620793       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="56.746261ms"
	E0916 10:51:01.621009       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.653757       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="31.565578ms"
	E0916 10:51:01.654052       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.752122       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="96.817172ms"
	E0916 10:51:01.752351       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.779052       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="131.931306ms"
	E0916 10:51:01.779266       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/kubernetes-dashboard-695b96c756\" failed with pods \"kubernetes-dashboard-695b96c756-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.823989       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="69.206055ms"
	E0916 10:51:01.824197       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.835228       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="51.706588ms"
	E0916 10:51:01.835537       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/kubernetes-dashboard-695b96c756\" failed with pods \"kubernetes-dashboard-695b96c756-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.877168       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="50.302551ms"
	E0916 10:51:01.877428       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4\" failed with pods \"dashboard-metrics-scraper-c5db448b4-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:01.877726       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="38.681906ms"
	E0916 10:51:01.877840       1 replica_set.go:560] "Unhandled Error" err="sync \"kubernetes-dashboard/kubernetes-dashboard-695b96c756\" failed with pods \"kubernetes-dashboard-695b96c756-\" is forbidden: error looking up service account kubernetes-dashboard/kubernetes-dashboard: serviceaccount \"kubernetes-dashboard\" not found" logger="UnhandledError"
	I0916 10:51:02.013803       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="133.031188ms"
	I0916 10:51:02.075833       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="194.640532ms"
	I0916 10:51:02.097439       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="83.571405ms"
	I0916 10:51:02.097659       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="164.167µs"
	I0916 10:51:02.120765       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="44.881154ms"
	I0916 10:51:02.121073       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="41.14µs"
	I0916 10:51:02.137019       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4" duration="43.495µs"
	I0916 10:51:09.475422       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="15.193489ms"
	I0916 10:51:09.475499       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kubernetes-dashboard/kubernetes-dashboard-695b96c756" duration="44.931µs"
	
	
	==> kube-proxy [1e1c55f6d316ecfe21daf226fdfd7a0c46b596f2904f3881ac5d62f21f9aa385] <==
	I0916 10:48:28.645183       1 server_linux.go:66] "Using iptables proxy"
	E0916 10:48:28.721587       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:29.873863       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:32.256620       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	E0916 10:48:36.493031       1 server.go:666] "Failed to retrieve node info" err="Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-911502\": dial tcp 192.168.49.2:8441: connect: connection refused"
	I0916 10:48:47.226267       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:48:47.226420       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:48:47.297852       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:48:47.298129       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:48:47.300753       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:48:47.301480       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:48:47.301624       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:47.312238       1 config.go:199] "Starting service config controller"
	I0916 10:48:47.312282       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:48:47.312370       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:48:47.312419       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:48:47.317887       1 config.go:328] "Starting node config controller"
	I0916 10:48:47.317941       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:48:47.413200       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:48:47.413370       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:48:47.418734       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [57c3cd94d0c597322bd62c8959dfd6008f8be2d7bb3e87f3a32026a40f60aea6] <==
	I0916 10:47:44.794046       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:47:44.895525       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:47:44.895614       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:47:44.916570       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:47:44.916637       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:47:44.918597       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:47:44.919425       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:47:44.919452       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:47:44.925419       1 config.go:199] "Starting service config controller"
	I0916 10:47:44.925472       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:47:44.925521       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:47:44.925531       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:47:44.928903       1 config.go:328] "Starting node config controller"
	I0916 10:47:44.928927       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:47:45.030179       1 shared_informer.go:320] Caches are synced for node config
	I0916 10:47:45.030253       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:47:45.030287       1 shared_informer.go:320] Caches are synced for endpoint slice config
	
	
	==> kube-scheduler [16a7dfc9e01198cab582eac129b2a3162c14e01f4ade815bbd0695fd67b02c4c] <==
	I0916 10:48:43.411981       1 serving.go:386] Generated self-signed cert in-memory
	W0916 10:48:47.054124       1 requestheader_controller.go:196] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0916 10:48:47.054270       1 authentication.go:370] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0916 10:48:47.054329       1 authentication.go:371] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0916 10:48:47.054367       1 authentication.go:372] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0916 10:48:47.137658       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:48:47.137797       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:48:47.143443       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:48:47.143739       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:48:47.146739       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:48:47.146832       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:48:47.247771       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kube-scheduler [31265291ac7da492c3cfad84540ba2b684cdf0abad82be5c56d392df7613dc44] <==
	E0916 10:47:37.419493       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419534       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 10:47:37.419548       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419590       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 10:47:37.419607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419645       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419660       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.419704       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.419719       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422428       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0916 10:47:37.422472       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422530       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 10:47:37.422547       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.422779       1 reflector.go:561] runtime/asm_arm64.s:1222: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0916 10:47:37.422805       1 reflector.go:158] "Unhandled Error" err="runtime/asm_arm64.s:1222: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	W0916 10:47:37.422862       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 10:47:37.422883       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424481       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 10:47:37.424516       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424588       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 10:47:37.424607       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 10:47:37.424692       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 10:47:37.424724       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	I0916 10:47:38.910602       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	E0916 10:48:38.401394       1 run.go:72] "command failed" err="finished without leader elect"
	
	
	==> kubelet <==
	Sep 16 10:48:44 functional-911502 kubelet[4699]: W0916 10:48:44.086224    4699 reflector.go:561] object-"kube-system"/"coredns": failed to list *v1.ConfigMap: Get "https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/configmaps?fieldSelector=metadata.name%3Dcoredns&resourceVersion=447": dial tcp 192.168.49.2:8441: connect: connection refused
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.086285    4699 reflector.go:158] "Unhandled Error" err="object-\"kube-system\"/\"coredns\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/configmaps?fieldSelector=metadata.name%3Dcoredns&resourceVersion=447\": dial tcp 192.168.49.2:8441: connect: connection refused" logger="UnhandledError"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: W0916 10:48:44.257240    4699 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://control-plane.minikube.internal:8441/apis/node.k8s.io/v1/runtimeclasses?resourceVersion=447": dial tcp 192.168.49.2:8441: connect: connection refused
	Sep 16 10:48:44 functional-911502 kubelet[4699]: E0916 10:48:44.257307    4699 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://control-plane.minikube.internal:8441/apis/node.k8s.io/v1/runtimeclasses?resourceVersion=447\": dial tcp 192.168.49.2:8441: connect: connection refused" logger="UnhandledError"
	Sep 16 10:48:44 functional-911502 kubelet[4699]: I0916 10:48:44.911110    4699 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="846e81f7bcac6804cf5ef499ea5ac265" path="/var/lib/kubelet/pods/846e81f7bcac6804cf5ef499ea5ac265/volumes"
	Sep 16 10:48:45 functional-911502 kubelet[4699]: I0916 10:48:45.045335    4699 kubelet.go:1895] "Trying to delete pod" pod="kube-system/kube-apiserver-functional-911502" podUID="d399bd77-51dd-4ad3-90d4-6cf11e9e156e"
	Sep 16 10:48:47 functional-911502 kubelet[4699]: I0916 10:48:47.316886    4699 kubelet.go:1900] "Deleted mirror pod because it is outdated" pod="kube-system/kube-apiserver-functional-911502"
	Sep 16 10:48:47 functional-911502 kubelet[4699]: I0916 10:48:47.549623    4699 scope.go:117] "RemoveContainer" containerID="ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: I0916 10:48:48.083956    4699 scope.go:117] "RemoveContainer" containerID="ffdba63c6f84655c46fe137c275b698287290e1772409211a5a7ec5f661ce4be"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: I0916 10:48:48.084957    4699 scope.go:117] "RemoveContainer" containerID="0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc"
	Sep 16 10:48:48 functional-911502 kubelet[4699]: E0916 10:48:48.085234    4699 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"storage-provisioner\" with CrashLoopBackOff: \"back-off 10s restarting failed container=storage-provisioner pod=storage-provisioner_kube-system(ecac562d-8318-4226-b5f1-61f2c76bb51b)\"" pod="kube-system/storage-provisioner" podUID="ecac562d-8318-4226-b5f1-61f2c76bb51b"
	Sep 16 10:48:51 functional-911502 kubelet[4699]: I0916 10:48:51.367187    4699 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-apiserver-functional-911502" podStartSLOduration=4.367170601 podStartE2EDuration="4.367170601s" podCreationTimestamp="2024-09-16 10:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:48:48.153684049 +0000 UTC m=+7.599562137" watchObservedRunningTime="2024-09-16 10:48:51.367170601 +0000 UTC m=+10.813048706"
	Sep 16 10:49:01 functional-911502 kubelet[4699]: I0916 10:49:01.908487    4699 scope.go:117] "RemoveContainer" containerID="0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: E0916 10:51:02.003636    4699 cpu_manager.go:395] "RemoveStaleState: removing container" podUID="846e81f7bcac6804cf5ef499ea5ac265" containerName="kube-apiserver"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: I0916 10:51:02.003722    4699 memory_manager.go:354] "RemoveStaleState removing state" podUID="846e81f7bcac6804cf5ef499ea5ac265" containerName="kube-apiserver"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: I0916 10:51:02.003733    4699 memory_manager.go:354] "RemoveStaleState removing state" podUID="846e81f7bcac6804cf5ef499ea5ac265" containerName="kube-apiserver"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: W0916 10:51:02.021783    4699 reflector.go:561] object-"kubernetes-dashboard"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:functional-911502" cannot list resource "configmaps" in API group "" in the namespace "kubernetes-dashboard": no relationship found between node 'functional-911502' and this object
	Sep 16 10:51:02 functional-911502 kubelet[4699]: E0916 10:51:02.022004    4699 reflector.go:158] "Unhandled Error" err="object-\"kubernetes-dashboard\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:functional-911502\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kubernetes-dashboard\": no relationship found between node 'functional-911502' and this object" logger="UnhandledError"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: E0916 10:51:02.072359    4699 cpu_manager.go:395] "RemoveStaleState: removing container" podUID="846e81f7bcac6804cf5ef499ea5ac265" containerName="kube-apiserver"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: I0916 10:51:02.122170    4699 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxq8p\" (UniqueName: \"kubernetes.io/projected/42bb249b-bfab-43de-8434-d26beb3b5dfd-kube-api-access-bxq8p\") pod \"kubernetes-dashboard-695b96c756-mbl59\" (UID: \"42bb249b-bfab-43de-8434-d26beb3b5dfd\") " pod="kubernetes-dashboard/kubernetes-dashboard-695b96c756-mbl59"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: I0916 10:51:02.122219    4699 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/42bb249b-bfab-43de-8434-d26beb3b5dfd-tmp-volume\") pod \"kubernetes-dashboard-695b96c756-mbl59\" (UID: \"42bb249b-bfab-43de-8434-d26beb3b5dfd\") " pod="kubernetes-dashboard/kubernetes-dashboard-695b96c756-mbl59"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: I0916 10:51:02.222910    4699 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2mdj\" (UniqueName: \"kubernetes.io/projected/982110d1-99e0-46cb-b2b3-45fc92464cf7-kube-api-access-q2mdj\") pod \"dashboard-metrics-scraper-c5db448b4-z5ddp\" (UID: \"982110d1-99e0-46cb-b2b3-45fc92464cf7\") " pod="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4-z5ddp"
	Sep 16 10:51:02 functional-911502 kubelet[4699]: I0916 10:51:02.222978    4699 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/982110d1-99e0-46cb-b2b3-45fc92464cf7-tmp-volume\") pod \"dashboard-metrics-scraper-c5db448b4-z5ddp\" (UID: \"982110d1-99e0-46cb-b2b3-45fc92464cf7\") " pod="kubernetes-dashboard/dashboard-metrics-scraper-c5db448b4-z5ddp"
	Sep 16 10:51:03 functional-911502 kubelet[4699]: I0916 10:51:03.132461    4699 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 10:51:09 functional-911502 kubelet[4699]: I0916 10:51:09.458318    4699 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kubernetes-dashboard/kubernetes-dashboard-695b96c756-mbl59" podStartSLOduration=2.906412876 podStartE2EDuration="8.458294107s" podCreationTimestamp="2024-09-16 10:51:01 +0000 UTC" firstStartedPulling="2024-09-16 10:51:03.456940345 +0000 UTC m=+142.902818425" lastFinishedPulling="2024-09-16 10:51:09.008821494 +0000 UTC m=+148.454699656" observedRunningTime="2024-09-16 10:51:09.457857425 +0000 UTC m=+148.903735505" watchObservedRunningTime="2024-09-16 10:51:09.458294107 +0000 UTC m=+148.904172187"
	
	
	==> storage-provisioner [0b22f9fb6da5edde96c59972f9b69e219cb7ac624b0680625c5bc98b68b55bdc] <==
	I0916 10:48:47.636953       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	F0916 10:48:47.638558       1 main.go:39] error getting server version: Get "https://10.96.0.1:443/version?timeout=32s": dial tcp 10.96.0.1:443: connect: connection refused
	
	
	==> storage-provisioner [494883dd75dacc85b892b0e05225fb2278a65feec328dc17976c6f49346f27fb] <==
	I0916 10:49:02.043876       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0916 10:49:02.059730       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0916 10:49:02.059783       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0916 10:49:19.457349       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0916 10:49:19.457523       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_functional-911502_0476ccd8-74e0-4006-af4c-04ca9058bfa5!
	I0916 10:49:19.457940       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"28a802e4-0156-4c92-adef-4d6f2592a206", APIVersion:"v1", ResourceVersion:"554", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' functional-911502_0476ccd8-74e0-4006-af4c-04ca9058bfa5 became leader
	I0916 10:49:19.558377       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_functional-911502_0476ccd8-74e0-4006-af4c-04ca9058bfa5!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p functional-911502 -n functional-911502
helpers_test.go:261: (dbg) Run:  kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (513.523µs)
helpers_test.go:263: kubectl --context functional-911502 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/parallel/NodeLabels (4.37s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup
functional_test_tunnel_test.go:212: (dbg) Run:  kubectl --context functional-911502 apply -f testdata/testsvc.yaml
functional_test_tunnel_test.go:212: (dbg) Non-zero exit: kubectl --context functional-911502 apply -f testdata/testsvc.yaml: fork/exec /usr/local/bin/kubectl: exec format error (2.901299ms)
functional_test_tunnel_test.go:214: kubectl --context functional-911502 apply -f testdata/testsvc.yaml failed: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/AccessDirect (107.16s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/AccessDirect
functional_test_tunnel_test.go:288: failed to hit nginx at "http://": Temporary Error: Get "http:": http: no Host in request URL
functional_test_tunnel_test.go:290: (dbg) Run:  kubectl --context functional-911502 get svc nginx-svc
functional_test_tunnel_test.go:290: (dbg) Non-zero exit: kubectl --context functional-911502 get svc nginx-svc: fork/exec /usr/local/bin/kubectl: exec format error (5.397956ms)
functional_test_tunnel_test.go:292: kubectl --context functional-911502 get svc nginx-svc failed: fork/exec /usr/local/bin/kubectl: exec format error
functional_test_tunnel_test.go:294: failed to kubectl get svc nginx-svc:
functional_test_tunnel_test.go:301: expected body to contain "Welcome to nginx!", but got *""*
--- FAIL: TestFunctional/parallel/TunnelCmd/serial/AccessDirect (107.16s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmd/DeployApp (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmd/DeployApp
functional_test.go:1437: (dbg) Run:  kubectl --context functional-911502 create deployment hello-node --image=registry.k8s.io/echoserver-arm:1.8
functional_test.go:1437: (dbg) Non-zero exit: kubectl --context functional-911502 create deployment hello-node --image=registry.k8s.io/echoserver-arm:1.8: fork/exec /usr/local/bin/kubectl: exec format error (639.783µs)
functional_test.go:1443: failed to create hello-node deployment with this command "kubectl --context functional-911502 create deployment hello-node --image=registry.k8s.io/echoserver-arm:1.8": fork/exec /usr/local/bin/kubectl: exec format error.
--- FAIL: TestFunctional/parallel/ServiceCmd/DeployApp (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmd/List (0.32s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmd/List
functional_test.go:1459: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 service list
functional_test.go:1464: expected 'service list' to contain *hello-node* but got -"|-------------|------------|--------------|-----|\n|  NAMESPACE  |    NAME    | TARGET PORT  | URL |\n|-------------|------------|--------------|-----|\n| default     | kubernetes | No node port |     |\n| kube-system | kube-dns   | No node port |     |\n|-------------|------------|--------------|-----|\n"-
--- FAIL: TestFunctional/parallel/ServiceCmd/List (0.32s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmd/JSONOutput (0.34s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmd/JSONOutput
functional_test.go:1489: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 service list -o json
functional_test.go:1494: Took "343.470472ms" to run "out/minikube-linux-arm64 -p functional-911502 service list -o json"
functional_test.go:1498: expected the json of 'service list' to include "hello-node" but got *"[{\"Namespace\":\"default\",\"Name\":\"kubernetes\",\"URLs\":[],\"PortNames\":[\"No node port\"]},{\"Namespace\":\"kube-system\",\"Name\":\"kube-dns\",\"URLs\":[],\"PortNames\":[\"No node port\"]}]"*. args: "out/minikube-linux-arm64 -p functional-911502 service list -o json"
--- FAIL: TestFunctional/parallel/ServiceCmd/JSONOutput (0.34s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmd/HTTPS (0.34s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmd/HTTPS
functional_test.go:1509: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 service --namespace=default --https --url hello-node
functional_test.go:1509: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 service --namespace=default --https --url hello-node: exit status 115 (335.07169ms)

                                                
                                                
-- stdout --
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	X Exiting due to SVC_NOT_FOUND: Service 'hello-node' was not found in 'default' namespace.
	You may select another namespace by using 'minikube service hello-node -n <namespace>'. Or list out all the services using 'minikube service list'

                                                
                                                
** /stderr **
functional_test.go:1511: failed to get service url. args "out/minikube-linux-arm64 -p functional-911502 service --namespace=default --https --url hello-node" : exit status 115
--- FAIL: TestFunctional/parallel/ServiceCmd/HTTPS (0.34s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmd/Format (0.35s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmd/Format
functional_test.go:1540: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 service hello-node --url --format={{.IP}}
functional_test.go:1540: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 service hello-node --url --format={{.IP}}: exit status 115 (350.25086ms)

                                                
                                                
-- stdout --
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	X Exiting due to SVC_NOT_FOUND: Service 'hello-node' was not found in 'default' namespace.
	You may select another namespace by using 'minikube service hello-node -n <namespace>'. Or list out all the services using 'minikube service list'

                                                
                                                
** /stderr **
functional_test.go:1542: failed to get service url with custom format. args "out/minikube-linux-arm64 -p functional-911502 service hello-node --url --format={{.IP}}": exit status 115
functional_test.go:1548: "" is not a valid IP
--- FAIL: TestFunctional/parallel/ServiceCmd/Format (0.35s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmd/URL (0.34s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmd/URL
functional_test.go:1559: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 service hello-node --url
functional_test.go:1559: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 service hello-node --url: exit status 115 (340.259817ms)

                                                
                                                
-- stdout --
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	X Exiting due to SVC_NOT_FOUND: Service 'hello-node' was not found in 'default' namespace.
	You may select another namespace by using 'minikube service hello-node -n <namespace>'. Or list out all the services using 'minikube service list'

                                                
                                                
** /stderr **
functional_test.go:1561: failed to get service url. args: "out/minikube-linux-arm64 -p functional-911502 service hello-node --url": exit status 115
functional_test.go:1565: found endpoint for hello-node: 
functional_test.go:1573: expected scheme to be -"http"- got scheme: *""*
--- FAIL: TestFunctional/parallel/ServiceCmd/URL (0.34s)

                                                
                                    
x
+
TestFunctional/parallel/MountCmd/any-port (2.71s)

                                                
                                                
=== RUN   TestFunctional/parallel/MountCmd/any-port
functional_test_mount_test.go:73: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-911502 /tmp/TestFunctionalparallelMountCmdany-port726931808/001:/mount-9p --alsologtostderr -v=1]
functional_test_mount_test.go:107: wrote "test-1726483856463003754" to /tmp/TestFunctionalparallelMountCmdany-port726931808/001/created-by-test
functional_test_mount_test.go:107: wrote "test-1726483856463003754" to /tmp/TestFunctionalparallelMountCmdany-port726931808/001/created-by-test-removed-by-pod
functional_test_mount_test.go:107: wrote "test-1726483856463003754" to /tmp/TestFunctionalparallelMountCmdany-port726931808/001/test-1726483856463003754
functional_test_mount_test.go:115: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:115: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 ssh "findmnt -T /mount-9p | grep 9p": exit status 1 (475.172684ms)

                                                
                                                
** stderr ** 
	ssh: Process exited with status 1

                                                
                                                
** /stderr **
functional_test_mount_test.go:115: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:129: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh -- ls -la /mount-9p
functional_test_mount_test.go:133: guest mount directory contents
total 2
-rw-r--r-- 1 docker docker 24 Sep 16 10:50 created-by-test
-rw-r--r-- 1 docker docker 24 Sep 16 10:50 created-by-test-removed-by-pod
-rw-r--r-- 1 docker docker 24 Sep 16 10:50 test-1726483856463003754
functional_test_mount_test.go:137: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh cat /mount-9p/test-1726483856463003754
functional_test_mount_test.go:148: (dbg) Run:  kubectl --context functional-911502 replace --force -f testdata/busybox-mount-test.yaml
functional_test_mount_test.go:148: (dbg) Non-zero exit: kubectl --context functional-911502 replace --force -f testdata/busybox-mount-test.yaml: fork/exec /usr/local/bin/kubectl: exec format error (678.724µs)
functional_test_mount_test.go:150: failed to 'kubectl replace' for busybox-mount-test. args "kubectl --context functional-911502 replace --force -f testdata/busybox-mount-test.yaml" : fork/exec /usr/local/bin/kubectl: exec format error
functional_test_mount_test.go:80: "TestFunctional/parallel/MountCmd/any-port" failed, getting debug info...
functional_test_mount_test.go:81: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "mount | grep 9p; ls -la /mount-9p; cat /mount-9p/pod-dates"
functional_test_mount_test.go:81: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 ssh "mount | grep 9p; ls -la /mount-9p; cat /mount-9p/pod-dates": exit status 1 (392.443645ms)

                                                
                                                
-- stdout --
	192.168.49.1 on /mount-9p type 9p (rw,relatime,sync,dirsync,dfltuid=1000,dfltgid=999,access=any,msize=262144,trans=tcp,noextend,port=42147)
	total 2
	-rw-r--r-- 1 docker docker 24 Sep 16 10:50 created-by-test
	-rw-r--r-- 1 docker docker 24 Sep 16 10:50 created-by-test-removed-by-pod
	-rw-r--r-- 1 docker docker 24 Sep 16 10:50 test-1726483856463003754
	cat: /mount-9p/pod-dates: No such file or directory

                                                
                                                
-- /stdout --
** stderr ** 
	ssh: Process exited with status 1

                                                
                                                
** /stderr **
functional_test_mount_test.go:83: debugging command "out/minikube-linux-arm64 -p functional-911502 ssh \"mount | grep 9p; ls -la /mount-9p; cat /mount-9p/pod-dates\"" failed : exit status 1
functional_test_mount_test.go:90: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "sudo umount -f /mount-9p"
functional_test_mount_test.go:94: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-911502 /tmp/TestFunctionalparallelMountCmdany-port726931808/001:/mount-9p --alsologtostderr -v=1] ...
functional_test_mount_test.go:94: (dbg) [out/minikube-linux-arm64 mount -p functional-911502 /tmp/TestFunctionalparallelMountCmdany-port726931808/001:/mount-9p --alsologtostderr -v=1] stdout:
* Mounting host path /tmp/TestFunctionalparallelMountCmdany-port726931808/001 into VM as /mount-9p ...
- Mount type:   9p
- User ID:      docker
- Group ID:     docker
- Version:      9p2000.L
- Message Size: 262144
- Options:      map[]
- Bind Address: 192.168.49.1:42147
* Userspace file server: ufs starting
* Successfully mounted /tmp/TestFunctionalparallelMountCmdany-port726931808/001 to /mount-9p

                                                
                                                
* NOTE: This process must stay alive for the mount to be accessible ...
* Unmounting /mount-9p ...

                                                
                                                

                                                
                                                
functional_test_mount_test.go:94: (dbg) [out/minikube-linux-arm64 mount -p functional-911502 /tmp/TestFunctionalparallelMountCmdany-port726931808/001:/mount-9p --alsologtostderr -v=1] stderr:
I0916 10:50:56.563662 2095895 out.go:345] Setting OutFile to fd 1 ...
I0916 10:50:56.566033 2095895 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:50:56.566069 2095895 out.go:358] Setting ErrFile to fd 2...
I0916 10:50:56.566086 2095895 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:50:56.566393 2095895 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
I0916 10:50:56.570958 2095895 mustload.go:65] Loading cluster: functional-911502
I0916 10:50:56.576375 2095895 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
I0916 10:50:56.576896 2095895 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
I0916 10:50:56.605659 2095895 host.go:66] Checking if "functional-911502" exists ...
I0916 10:50:56.606007 2095895 cli_runner.go:164] Run: docker system info --format "{{json .}}"
I0916 10:50:56.755405 2095895 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:48 OomKillDisable:true NGoroutines:71 SystemTime:2024-09-16 10:50:56.739573395 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarc
h64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerError
s:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
I0916 10:50:56.755577 2095895 cli_runner.go:164] Run: docker network inspect functional-911502 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
I0916 10:50:56.804720 2095895 out.go:177] * Mounting host path /tmp/TestFunctionalparallelMountCmdany-port726931808/001 into VM as /mount-9p ...
I0916 10:50:56.807812 2095895 out.go:177]   - Mount type:   9p
I0916 10:50:56.811566 2095895 out.go:177]   - User ID:      docker
I0916 10:50:56.814177 2095895 out.go:177]   - Group ID:     docker
I0916 10:50:56.816649 2095895 out.go:177]   - Version:      9p2000.L
I0916 10:50:56.819412 2095895 out.go:177]   - Message Size: 262144
I0916 10:50:56.822819 2095895 out.go:177]   - Options:      map[]
I0916 10:50:56.829981 2095895 out.go:177]   - Bind Address: 192.168.49.1:42147
I0916 10:50:56.832403 2095895 out.go:177] * Userspace file server: 
I0916 10:50:56.832687 2095895 ssh_runner.go:195] Run: /bin/bash -c "[ "x$(findmnt -T /mount-9p | grep /mount-9p)" != "x" ] && sudo umount -f /mount-9p || echo "
I0916 10:50:56.832772 2095895 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
I0916 10:50:56.855664 2095895 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
I0916 10:50:56.968526 2095895 mount.go:180] unmount for /mount-9p ran successfully
I0916 10:50:56.968575 2095895 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /mount-9p"
I0916 10:50:56.983433 2095895 ssh_runner.go:195] Run: /bin/bash -c "sudo mount -t 9p -o dfltgid=$(grep ^docker: /etc/group | cut -d: -f3),dfltuid=$(id -u docker),msize=262144,port=42147,trans=tcp,version=9p2000.L 192.168.49.1 /mount-9p"
I0916 10:50:57.008484 2095895 main.go:125] stdlog: ufs.go:141 connected
I0916 10:50:57.008652 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tversion tag 65535 msize 262144 version '9P2000.L'
I0916 10:50:57.008695 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rversion tag 65535 msize 262144 version '9P2000'
I0916 10:50:57.009746 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tattach tag 0 fid 0 afid 4294967295 uname 'nobody' nuname 0 aname ''
I0916 10:50:57.009836 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rattach tag 0 aqid (15c6254 fa74fc4d 'd')
I0916 10:50:57.013482 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 0
I0916 10:50:57.015204 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('001' 'jenkins' 'jenkins' '' q (15c6254 fa74fc4d 'd') m d775 at 0 mt 1726483856 l 4096 t 0 d 0 ext )
I0916 10:50:57.020119 2095895 lock.go:50] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/.mount-process: {Name:mk5d1cbe0f499b2eda354c76bfb94b5e9e380812 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0916 10:50:57.020911 2095895 mount.go:105] mount successful: ""
I0916 10:50:57.023823 2095895 out.go:177] * Successfully mounted /tmp/TestFunctionalparallelMountCmdany-port726931808/001 to /mount-9p
I0916 10:50:57.026450 2095895 out.go:201] 
I0916 10:50:57.028941 2095895 out.go:177] * NOTE: This process must stay alive for the mount to be accessible ...
I0916 10:50:57.919404 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 0
I0916 10:50:57.919484 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('001' 'jenkins' 'jenkins' '' q (15c6254 fa74fc4d 'd') m d775 at 0 mt 1726483856 l 4096 t 0 d 0 ext )
I0916 10:50:57.920001 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Twalk tag 0 fid 0 newfid 1 
I0916 10:50:57.920057 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rwalk tag 0 
I0916 10:50:57.920200 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Topen tag 0 fid 1 mode 0
I0916 10:50:57.920308 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Ropen tag 0 qid (15c6254 fa74fc4d 'd') iounit 0
I0916 10:50:57.920465 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 0
I0916 10:50:57.920533 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('001' 'jenkins' 'jenkins' '' q (15c6254 fa74fc4d 'd') m d775 at 0 mt 1726483856 l 4096 t 0 d 0 ext )
I0916 10:50:57.920723 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tread tag 0 fid 1 offset 0 count 262120
I0916 10:50:57.920860 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rread tag 0 count 258
I0916 10:50:57.921024 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tread tag 0 fid 1 offset 258 count 261862
I0916 10:50:57.921060 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rread tag 0 count 0
I0916 10:50:57.921197 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tread tag 0 fid 1 offset 258 count 262120
I0916 10:50:57.921229 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rread tag 0 count 0
I0916 10:50:57.921363 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Twalk tag 0 fid 0 newfid 2 0:'created-by-test' 
I0916 10:50:57.921396 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rwalk tag 0 (15c6255 fa74fc4d '') 
I0916 10:50:57.921512 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 2
I0916 10:50:57.921557 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('created-by-test' 'jenkins' 'jenkins' '' q (15c6255 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:57.921690 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 2
I0916 10:50:57.921723 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('created-by-test' 'jenkins' 'jenkins' '' q (15c6255 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:57.921853 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tclunk tag 0 fid 2
I0916 10:50:57.921879 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rclunk tag 0
I0916 10:50:57.922026 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Twalk tag 0 fid 0 newfid 2 0:'test-1726483856463003754' 
I0916 10:50:57.922060 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rwalk tag 0 (15c6257 fa74fc4d '') 
I0916 10:50:57.922179 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 2
I0916 10:50:57.922212 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('test-1726483856463003754' 'jenkins' 'jenkins' '' q (15c6257 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:57.922342 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 2
I0916 10:50:57.922381 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('test-1726483856463003754' 'jenkins' 'jenkins' '' q (15c6257 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:57.922498 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tclunk tag 0 fid 2
I0916 10:50:57.922529 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rclunk tag 0
I0916 10:50:57.922670 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Twalk tag 0 fid 0 newfid 2 0:'created-by-test-removed-by-pod' 
I0916 10:50:57.922761 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rwalk tag 0 (15c6256 fa74fc4d '') 
I0916 10:50:57.922885 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 2
I0916 10:50:57.922928 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('created-by-test-removed-by-pod' 'jenkins' 'jenkins' '' q (15c6256 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:57.923053 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 2
I0916 10:50:57.923085 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('created-by-test-removed-by-pod' 'jenkins' 'jenkins' '' q (15c6256 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:57.923215 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tclunk tag 0 fid 2
I0916 10:50:57.923244 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rclunk tag 0
I0916 10:50:57.923379 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tread tag 0 fid 1 offset 258 count 262120
I0916 10:50:57.923424 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rread tag 0 count 0
I0916 10:50:57.923574 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tclunk tag 0 fid 1
I0916 10:50:57.923603 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rclunk tag 0
I0916 10:50:58.289038 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Twalk tag 0 fid 0 newfid 1 0:'test-1726483856463003754' 
I0916 10:50:58.289128 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rwalk tag 0 (15c6257 fa74fc4d '') 
I0916 10:50:58.289368 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 1
I0916 10:50:58.289436 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('test-1726483856463003754' 'jenkins' 'jenkins' '' q (15c6257 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:58.289618 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Twalk tag 0 fid 1 newfid 2 
I0916 10:50:58.289652 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rwalk tag 0 
I0916 10:50:58.289803 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Topen tag 0 fid 2 mode 0
I0916 10:50:58.289866 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Ropen tag 0 qid (15c6257 fa74fc4d '') iounit 0
I0916 10:50:58.290071 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 1
I0916 10:50:58.290108 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('test-1726483856463003754' 'jenkins' 'jenkins' '' q (15c6257 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:58.290453 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tread tag 0 fid 2 offset 0 count 262120
I0916 10:50:58.290511 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rread tag 0 count 24
I0916 10:50:58.290659 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tread tag 0 fid 2 offset 24 count 262120
I0916 10:50:58.290820 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rread tag 0 count 0
I0916 10:50:58.291137 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tread tag 0 fid 2 offset 24 count 262120
I0916 10:50:58.291194 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rread tag 0 count 0
I0916 10:50:58.291555 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tclunk tag 0 fid 2
I0916 10:50:58.291600 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rclunk tag 0
I0916 10:50:58.291799 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tclunk tag 0 fid 1
I0916 10:50:58.291821 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rclunk tag 0
I0916 10:50:58.677778 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 0
I0916 10:50:58.677861 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('001' 'jenkins' 'jenkins' '' q (15c6254 fa74fc4d 'd') m d775 at 0 mt 1726483856 l 4096 t 0 d 0 ext )
I0916 10:50:58.678216 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Twalk tag 0 fid 0 newfid 1 
I0916 10:50:58.678272 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rwalk tag 0 
I0916 10:50:58.678413 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Topen tag 0 fid 1 mode 0
I0916 10:50:58.678497 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Ropen tag 0 qid (15c6254 fa74fc4d 'd') iounit 0
I0916 10:50:58.678656 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 0
I0916 10:50:58.678792 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('001' 'jenkins' 'jenkins' '' q (15c6254 fa74fc4d 'd') m d775 at 0 mt 1726483856 l 4096 t 0 d 0 ext )
I0916 10:50:58.678989 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tread tag 0 fid 1 offset 0 count 262120
I0916 10:50:58.679088 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rread tag 0 count 258
I0916 10:50:58.679223 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tread tag 0 fid 1 offset 258 count 261862
I0916 10:50:58.679258 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rread tag 0 count 0
I0916 10:50:58.679376 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tread tag 0 fid 1 offset 258 count 262120
I0916 10:50:58.679407 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rread tag 0 count 0
I0916 10:50:58.679563 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Twalk tag 0 fid 0 newfid 2 0:'created-by-test' 
I0916 10:50:58.679599 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rwalk tag 0 (15c6255 fa74fc4d '') 
I0916 10:50:58.679721 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 2
I0916 10:50:58.679756 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('created-by-test' 'jenkins' 'jenkins' '' q (15c6255 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:58.681931 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 2
I0916 10:50:58.682015 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('created-by-test' 'jenkins' 'jenkins' '' q (15c6255 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:58.682163 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tclunk tag 0 fid 2
I0916 10:50:58.682233 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rclunk tag 0
I0916 10:50:58.682383 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Twalk tag 0 fid 0 newfid 2 0:'test-1726483856463003754' 
I0916 10:50:58.682460 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rwalk tag 0 (15c6257 fa74fc4d '') 
I0916 10:50:58.682565 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 2
I0916 10:50:58.682632 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('test-1726483856463003754' 'jenkins' 'jenkins' '' q (15c6257 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:58.682776 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 2
I0916 10:50:58.682832 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('test-1726483856463003754' 'jenkins' 'jenkins' '' q (15c6257 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:58.682939 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tclunk tag 0 fid 2
I0916 10:50:58.682973 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rclunk tag 0
I0916 10:50:58.683158 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Twalk tag 0 fid 0 newfid 2 0:'created-by-test-removed-by-pod' 
I0916 10:50:58.683230 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rwalk tag 0 (15c6256 fa74fc4d '') 
I0916 10:50:58.683341 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 2
I0916 10:50:58.683396 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('created-by-test-removed-by-pod' 'jenkins' 'jenkins' '' q (15c6256 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:58.683507 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tstat tag 0 fid 2
I0916 10:50:58.683596 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rstat tag 0 st ('created-by-test-removed-by-pod' 'jenkins' 'jenkins' '' q (15c6256 fa74fc4d '') m 644 at 0 mt 1726483856 l 24 t 0 d 0 ext )
I0916 10:50:58.683714 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tclunk tag 0 fid 2
I0916 10:50:58.683748 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rclunk tag 0
I0916 10:50:58.684043 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tread tag 0 fid 1 offset 258 count 262120
I0916 10:50:58.684113 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rread tag 0 count 0
I0916 10:50:58.684275 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tclunk tag 0 fid 1
I0916 10:50:58.684338 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rclunk tag 0
I0916 10:50:58.685662 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Twalk tag 0 fid 0 newfid 1 0:'pod-dates' 
I0916 10:50:58.685756 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rerror tag 0 ename 'file not found' ecode 0
I0916 10:50:59.036488 2095895 main.go:125] stdlog: srv_conn.go:133 >>> 192.168.49.2:51258 Tclunk tag 0 fid 0
I0916 10:50:59.036533 2095895 main.go:125] stdlog: srv_conn.go:190 <<< 192.168.49.2:51258 Rclunk tag 0
I0916 10:50:59.037472 2095895 main.go:125] stdlog: ufs.go:147 disconnected
I0916 10:50:59.065699 2095895 out.go:177] * Unmounting /mount-9p ...
I0916 10:50:59.068298 2095895 ssh_runner.go:195] Run: /bin/bash -c "[ "x$(findmnt -T /mount-9p | grep /mount-9p)" != "x" ] && sudo umount -f /mount-9p || echo "
I0916 10:50:59.077229 2095895 mount.go:180] unmount for /mount-9p ran successfully
I0916 10:50:59.077507 2095895 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/.mount-process: {Name:mk5d1cbe0f499b2eda354c76bfb94b5e9e380812 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
I0916 10:50:59.080340 2095895 out.go:201] 
W0916 10:50:59.083182 2095895 out.go:270] X Exiting due to MK_INTERRUPTED: Received terminated signal
X Exiting due to MK_INTERRUPTED: Received terminated signal
I0916 10:50:59.085780 2095895 out.go:201] 
--- FAIL: TestFunctional/parallel/MountCmd/any-port (2.71s)

                                                
                                    
x
+
TestMultiControlPlane/serial/NodeLabels (3.05s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/NodeLabels
ha_test.go:255: (dbg) Run:  kubectl --context ha-234759 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]"
ha_test.go:255: (dbg) Non-zero exit: kubectl --context ha-234759 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]": fork/exec /usr/local/bin/kubectl: exec format error (745.718µs)
ha_test.go:257: failed to 'kubectl get nodes' with args "kubectl --context ha-234759 get nodes -o \"jsonpath=[{range .items[*]}{.metadata.labels},{end}]\"": fork/exec /usr/local/bin/kubectl: exec format error
ha_test.go:264: failed to decode json from label list: args "kubectl --context ha-234759 get nodes -o \"jsonpath=[{range .items[*]}{.metadata.labels},{end}]\"": unexpected end of JSON input
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiControlPlane/serial/NodeLabels]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect ha-234759
helpers_test.go:235: (dbg) docker inspect ha-234759:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59",
	        "Created": "2024-09-16T10:51:26.447161448Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2101278,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:51:26.59587884Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/hostname",
	        "HostsPath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/hosts",
	        "LogPath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59-json.log",
	        "Name": "/ha-234759",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "ha-234759:/var",
	                "/lib/modules:/lib/modules:ro"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "ha-234759",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613/merged",
	                "UpperDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613/diff",
	                "WorkDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "volume",
	                "Name": "ha-234759",
	                "Source": "/var/lib/docker/volumes/ha-234759/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            },
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            }
	        ],
	        "Config": {
	            "Hostname": "ha-234759",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "ha-234759",
	                "name.minikube.sigs.k8s.io": "ha-234759",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "a4bc0758e88359f4099794ca40e8f9323bb5644b881b9b4ad2307dab2c5abb00",
	            "SandboxKey": "/var/run/docker/netns/a4bc0758e883",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40597"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40598"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40601"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40599"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40600"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "ha-234759": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "941929ec13d1e6034904933d29100a93cf04d9e6a30844d8d0c54e3a464c32cd",
	                    "EndpointID": "68222693ea4e7622a2bdfb3001db23b54719fbd194eac21b546910cc3e2062bd",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "ha-234759",
	                        "6306ac5a5985"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p ha-234759 -n ha-234759
helpers_test.go:244: <<< TestMultiControlPlane/serial/NodeLabels FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiControlPlane/serial/NodeLabels]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p ha-234759 logs -n 25: (1.906306029s)
helpers_test.go:252: TestMultiControlPlane/serial/NodeLabels logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| Command |                 Args                 |      Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	| image   | functional-911502                    | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	|         | image ls --format table              |                   |         |         |                     |                     |
	|         | --alsologtostderr                    |                   |         |         |                     |                     |
	| image   | functional-911502 image ls           | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	| delete  | -p functional-911502                 | functional-911502 | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:51 UTC |
	| start   | -p ha-234759 --wait=true             | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:51 UTC | 16 Sep 24 10:53 UTC |
	|         | --memory=2200 --ha                   |                   |         |         |                     |                     |
	|         | -v=7 --alsologtostderr               |                   |         |         |                     |                     |
	|         | --driver=docker                      |                   |         |         |                     |                     |
	|         | --container-runtime=containerd       |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- apply -f             | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | ./testdata/ha/ha-pod-dns-test.yaml   |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- rollout status       | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | deployment/busybox                   |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- get pods -o          | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- get pods -o          | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | jsonpath='{.items[*].metadata.name}' |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-7l4g7 --           |                   |         |         |                     |                     |
	|         | nslookup kubernetes.io               |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-kjr9x --           |                   |         |         |                     |                     |
	|         | nslookup kubernetes.io               |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-m9lsb --           |                   |         |         |                     |                     |
	|         | nslookup kubernetes.io               |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-7l4g7 --           |                   |         |         |                     |                     |
	|         | nslookup kubernetes.default          |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-kjr9x --           |                   |         |         |                     |                     |
	|         | nslookup kubernetes.default          |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-m9lsb --           |                   |         |         |                     |                     |
	|         | nslookup kubernetes.default          |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-7l4g7 -- nslookup  |                   |         |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-kjr9x -- nslookup  |                   |         |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-m9lsb -- nslookup  |                   |         |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- get pods -o          | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | jsonpath='{.items[*].metadata.name}' |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-7l4g7              |                   |         |         |                     |                     |
	|         | -- sh -c nslookup                    |                   |         |         |                     |                     |
	|         | host.minikube.internal | awk         |                   |         |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3              |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-7l4g7 -- sh        |                   |         |         |                     |                     |
	|         | -c ping -c 1 192.168.49.1            |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-kjr9x              |                   |         |         |                     |                     |
	|         | -- sh -c nslookup                    |                   |         |         |                     |                     |
	|         | host.minikube.internal | awk         |                   |         |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3              |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-kjr9x -- sh        |                   |         |         |                     |                     |
	|         | -c ping -c 1 192.168.49.1            |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-m9lsb              |                   |         |         |                     |                     |
	|         | -- sh -c nslookup                    |                   |         |         |                     |                     |
	|         | host.minikube.internal | awk         |                   |         |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3              |                   |         |         |                     |                     |
	| kubectl | -p ha-234759 -- exec                 | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:53 UTC |
	|         | busybox-7dff88458-m9lsb -- sh        |                   |         |         |                     |                     |
	|         | -c ping -c 1 192.168.49.1            |                   |         |         |                     |                     |
	| node    | add -p ha-234759 -v=7                | ha-234759         | jenkins | v1.34.0 | 16 Sep 24 10:53 UTC | 16 Sep 24 10:54 UTC |
	|         | --alsologtostderr                    |                   |         |         |                     |                     |
	|---------|--------------------------------------|-------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:51:21
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:51:21.236740 2100790 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:51:21.236879 2100790 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:21.236887 2100790 out.go:358] Setting ErrFile to fd 2...
	I0916 10:51:21.236893 2100790 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:21.237140 2100790 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:51:21.237558 2100790 out.go:352] Setting JSON to false
	I0916 10:51:21.238489 2100790 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":138824,"bootTime":1726345058,"procs":173,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:51:21.238569 2100790 start.go:139] virtualization:  
	I0916 10:51:21.241135 2100790 out.go:177] * [ha-234759] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:51:21.243355 2100790 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:51:21.243404 2100790 notify.go:220] Checking for updates...
	I0916 10:51:21.247599 2100790 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:51:21.249305 2100790 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:51:21.251096 2100790 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:51:21.252852 2100790 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:51:21.254707 2100790 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:51:21.256640 2100790 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:51:21.284750 2100790 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:51:21.284894 2100790 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:21.353436 2100790 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:41 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 10:51:21.343927486 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:21.353553 2100790 docker.go:318] overlay module found
	I0916 10:51:21.355603 2100790 out.go:177] * Using the docker driver based on user configuration
	I0916 10:51:21.357141 2100790 start.go:297] selected driver: docker
	I0916 10:51:21.357154 2100790 start.go:901] validating driver "docker" against <nil>
	I0916 10:51:21.357168 2100790 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:51:21.357874 2100790 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:21.410168 2100790 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:41 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 10:51:21.400729656 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:21.410385 2100790 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:51:21.410615 2100790 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:51:21.412383 2100790 out.go:177] * Using Docker driver with root privileges
	I0916 10:51:21.413806 2100790 cni.go:84] Creating CNI manager for ""
	I0916 10:51:21.413874 2100790 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0916 10:51:21.413893 2100790 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:51:21.413976 2100790 start.go:340] cluster config:
	{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:contain
erd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock:
SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:21.416766 2100790 out.go:177] * Starting "ha-234759" primary control-plane node in "ha-234759" cluster
	I0916 10:51:21.418419 2100790 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:51:21.420056 2100790 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:51:21.421575 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:51:21.421630 2100790 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:51:21.421642 2100790 cache.go:56] Caching tarball of preloaded images
	I0916 10:51:21.421667 2100790 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:51:21.421729 2100790 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:51:21.421740 2100790 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:51:21.422099 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:51:21.422139 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json: {Name:mk1338431a5f691abe348219d4be1bbe2ed8cc31 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	W0916 10:51:21.441371 2100790 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:51:21.441393 2100790 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:51:21.441539 2100790 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:51:21.441563 2100790 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:51:21.441579 2100790 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:51:21.441588 2100790 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:51:21.441611 2100790 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:51:21.443229 2100790 image.go:273] response: 
	I0916 10:51:21.559332 2100790 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:51:21.559372 2100790 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:51:21.559403 2100790 start.go:360] acquireMachinesLock for ha-234759: {Name:mk07434fa5fb218c324ac4567510c65c6e772f63 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:51:21.559529 2100790 start.go:364] duration metric: took 106.461µs to acquireMachinesLock for "ha-234759"
	I0916 10:51:21.559560 2100790 start.go:93] Provisioning new machine with config: &{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP: APIServerName:minikubeCA
APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false Custo
mQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:51:21.559641 2100790 start.go:125] createHost starting for "" (driver="docker")
	I0916 10:51:21.561629 2100790 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 10:51:21.561866 2100790 start.go:159] libmachine.API.Create for "ha-234759" (driver="docker")
	I0916 10:51:21.561908 2100790 client.go:168] LocalClient.Create starting
	I0916 10:51:21.562009 2100790 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 10:51:21.562046 2100790 main.go:141] libmachine: Decoding PEM data...
	I0916 10:51:21.562065 2100790 main.go:141] libmachine: Parsing certificate...
	I0916 10:51:21.562129 2100790 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 10:51:21.562154 2100790 main.go:141] libmachine: Decoding PEM data...
	I0916 10:51:21.562170 2100790 main.go:141] libmachine: Parsing certificate...
	I0916 10:51:21.562541 2100790 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 10:51:21.577993 2100790 cli_runner.go:211] docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 10:51:21.578076 2100790 network_create.go:284] running [docker network inspect ha-234759] to gather additional debugging logs...
	I0916 10:51:21.578098 2100790 cli_runner.go:164] Run: docker network inspect ha-234759
	W0916 10:51:21.593488 2100790 cli_runner.go:211] docker network inspect ha-234759 returned with exit code 1
	I0916 10:51:21.593523 2100790 network_create.go:287] error running [docker network inspect ha-234759]: docker network inspect ha-234759: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network ha-234759 not found
	I0916 10:51:21.593537 2100790 network_create.go:289] output of [docker network inspect ha-234759]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network ha-234759 not found
	
	** /stderr **
	I0916 10:51:21.593648 2100790 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:51:21.609508 2100790 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x400164dff0}
	I0916 10:51:21.609552 2100790 network_create.go:124] attempt to create docker network ha-234759 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0916 10:51:21.609615 2100790 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=ha-234759 ha-234759
	I0916 10:51:21.679986 2100790 network_create.go:108] docker network ha-234759 192.168.49.0/24 created
	I0916 10:51:21.680022 2100790 kic.go:121] calculated static IP "192.168.49.2" for the "ha-234759" container
	I0916 10:51:21.680096 2100790 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:51:21.696483 2100790 cli_runner.go:164] Run: docker volume create ha-234759 --label name.minikube.sigs.k8s.io=ha-234759 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:51:21.711932 2100790 oci.go:103] Successfully created a docker volume ha-234759
	I0916 10:51:21.712019 2100790 cli_runner.go:164] Run: docker run --rm --name ha-234759-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-234759 --entrypoint /usr/bin/test -v ha-234759:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:51:22.321948 2100790 oci.go:107] Successfully prepared a docker volume ha-234759
	I0916 10:51:22.322000 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:51:22.322024 2100790 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:51:22.322098 2100790 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v ha-234759:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:51:26.380680 2100790 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v ha-234759:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.058525981s)
	I0916 10:51:26.380727 2100790 kic.go:203] duration metric: took 4.058699338s to extract preloaded images to volume ...
	W0916 10:51:26.380867 2100790 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:51:26.380980 2100790 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:51:26.433008 2100790 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname ha-234759 --name ha-234759 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-234759 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=ha-234759 --network ha-234759 --ip 192.168.49.2 --volume ha-234759:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:51:26.776518 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Running}}
	I0916 10:51:26.800922 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:51:26.824391 2100790 cli_runner.go:164] Run: docker exec ha-234759 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:51:26.885690 2100790 oci.go:144] the created container "ha-234759" has a running status.
	I0916 10:51:26.885718 2100790 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa...
	I0916 10:51:27.681475 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 10:51:27.681524 2100790 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:51:27.702247 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:51:27.734249 2100790 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:51:27.734269 2100790 kic_runner.go:114] Args: [docker exec --privileged ha-234759 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:51:27.800109 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:51:27.818823 2100790 machine.go:93] provisionDockerMachine start ...
	I0916 10:51:27.818911 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:27.847445 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:27.847725 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40597 <nil> <nil>}
	I0916 10:51:27.847735 2100790 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:51:27.995717 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759
	
	I0916 10:51:27.995740 2100790 ubuntu.go:169] provisioning hostname "ha-234759"
	I0916 10:51:27.995808 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:28.017912 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:28.018169 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40597 <nil> <nil>}
	I0916 10:51:28.018187 2100790 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-234759 && echo "ha-234759" | sudo tee /etc/hostname
	I0916 10:51:28.168561 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759
	
	I0916 10:51:28.168704 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:28.185728 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:28.185976 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40597 <nil> <nil>}
	I0916 10:51:28.185995 2100790 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-234759' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-234759/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-234759' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:51:28.322997 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:51:28.323026 2100790 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:51:28.323054 2100790 ubuntu.go:177] setting up certificates
	I0916 10:51:28.323066 2100790 provision.go:84] configureAuth start
	I0916 10:51:28.323133 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759
	I0916 10:51:28.340179 2100790 provision.go:143] copyHostCerts
	I0916 10:51:28.340229 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:51:28.340268 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:51:28.340280 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:51:28.340369 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:51:28.340463 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:51:28.340485 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:51:28.340490 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:51:28.340526 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:51:28.340578 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:51:28.340599 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:51:28.340610 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:51:28.340638 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:51:28.340701 2100790 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.ha-234759 san=[127.0.0.1 192.168.49.2 ha-234759 localhost minikube]
	I0916 10:51:28.658968 2100790 provision.go:177] copyRemoteCerts
	I0916 10:51:28.659040 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:51:28.659083 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:28.675856 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:28.775958 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:51:28.776026 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:51:28.800914 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:51:28.800979 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1196 bytes)
	I0916 10:51:28.825064 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:51:28.825129 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:51:28.849401 2100790 provision.go:87] duration metric: took 526.307956ms to configureAuth
	I0916 10:51:28.849431 2100790 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:51:28.849623 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:51:28.849637 2100790 machine.go:96] duration metric: took 1.030796915s to provisionDockerMachine
	I0916 10:51:28.849644 2100790 client.go:171] duration metric: took 7.287725736s to LocalClient.Create
	I0916 10:51:28.849658 2100790 start.go:167] duration metric: took 7.287792853s to libmachine.API.Create "ha-234759"
	I0916 10:51:28.849669 2100790 start.go:293] postStartSetup for "ha-234759" (driver="docker")
	I0916 10:51:28.849678 2100790 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:51:28.849734 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:51:28.849776 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:28.866397 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:28.967933 2100790 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:51:28.971430 2100790 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:51:28.971469 2100790 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:51:28.971503 2100790 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:51:28.971515 2100790 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:51:28.971527 2100790 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:51:28.971601 2100790 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:51:28.971690 2100790 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:51:28.971702 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:51:28.971816 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:51:28.980777 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:51:29.007858 2100790 start.go:296] duration metric: took 158.172578ms for postStartSetup
	I0916 10:51:29.008304 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759
	I0916 10:51:29.025527 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:51:29.025826 2100790 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:51:29.025880 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:29.042634 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:29.135593 2100790 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:51:29.139902 2100790 start.go:128] duration metric: took 7.580245826s to createHost
	I0916 10:51:29.139929 2100790 start.go:83] releasing machines lock for "ha-234759", held for 7.580385985s
	I0916 10:51:29.139999 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759
	I0916 10:51:29.157116 2100790 ssh_runner.go:195] Run: cat /version.json
	I0916 10:51:29.157159 2100790 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:51:29.157177 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:29.157227 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:29.174648 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:29.182986 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:29.266359 2100790 ssh_runner.go:195] Run: systemctl --version
	I0916 10:51:29.394985 2100790 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:51:29.399358 2100790 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:51:29.426234 2100790 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:51:29.426365 2100790 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:51:29.454426 2100790 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:51:29.454452 2100790 start.go:495] detecting cgroup driver to use...
	I0916 10:51:29.454484 2100790 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:51:29.454535 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:51:29.468955 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:51:29.484050 2100790 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:51:29.484149 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:51:29.498384 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:51:29.513595 2100790 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:51:29.623797 2100790 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:51:29.719649 2100790 docker.go:233] disabling docker service ...
	I0916 10:51:29.719789 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:51:29.742658 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:51:29.754648 2100790 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:51:29.851434 2100790 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:51:29.945582 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:51:29.957314 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:51:29.975118 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:51:29.986788 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:51:29.997691 2100790 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:51:29.997763 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:51:30.037137 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:51:30.051230 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:51:30.064188 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:51:30.078048 2100790 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:51:30.089944 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:51:30.103181 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:51:30.117182 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:51:30.130136 2100790 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:51:30.141349 2100790 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:51:30.151581 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:51:30.249574 2100790 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:51:30.374729 2100790 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:51:30.374852 2100790 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:51:30.378835 2100790 start.go:563] Will wait 60s for crictl version
	I0916 10:51:30.378986 2100790 ssh_runner.go:195] Run: which crictl
	I0916 10:51:30.382308 2100790 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:51:30.421985 2100790 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:51:30.422089 2100790 ssh_runner.go:195] Run: containerd --version
	I0916 10:51:30.443819 2100790 ssh_runner.go:195] Run: containerd --version
	I0916 10:51:30.469015 2100790 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:51:30.471164 2100790 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:51:30.485573 2100790 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:51:30.489292 2100790 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:51:30.500562 2100790 kubeadm.go:883] updating cluster {Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APISe
rverNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:fals
e CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:51:30.500689 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:51:30.500765 2100790 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:51:30.539014 2100790 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:51:30.539036 2100790 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:51:30.539099 2100790 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:51:30.576121 2100790 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:51:30.576144 2100790 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:51:30.576152 2100790 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 containerd true true} ...
	I0916 10:51:30.576251 2100790 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-234759 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:51:30.576319 2100790 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:51:30.612571 2100790 cni.go:84] Creating CNI manager for ""
	I0916 10:51:30.612656 2100790 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 10:51:30.612679 2100790 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:51:30.612734 2100790 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:ha-234759 NodeName:ha-234759 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kuberne
tes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:51:30.612917 2100790 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "ha-234759"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:51:30.612969 2100790 kube-vip.go:115] generating kube-vip config ...
	I0916 10:51:30.613057 2100790 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:51:30.625749 2100790 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:51:30.625852 2100790 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/super-admin.conf"
	    name: kubeconfig
	status: {}
	I0916 10:51:30.625916 2100790 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:51:30.635319 2100790 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:51:30.635394 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube /etc/kubernetes/manifests
	I0916 10:51:30.644381 2100790 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (313 bytes)
	I0916 10:51:30.663252 2100790 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:51:30.681992 2100790 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2163 bytes)
	I0916 10:51:30.700307 2100790 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1447 bytes)
	I0916 10:51:30.718380 2100790 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:51:30.721958 2100790 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:51:30.733158 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:51:30.830241 2100790 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:51:30.845600 2100790 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759 for IP: 192.168.49.2
	I0916 10:51:30.845675 2100790 certs.go:194] generating shared ca certs ...
	I0916 10:51:30.845705 2100790 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:30.845874 2100790 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:51:30.845952 2100790 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:51:30.845987 2100790 certs.go:256] generating profile certs ...
	I0916 10:51:30.846069 2100790 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key
	I0916 10:51:30.846123 2100790 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt with IP's: []
	I0916 10:51:31.363342 2100790 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt ...
	I0916 10:51:31.363377 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt: {Name:mka4287c7f3ffd0700fbbe62e3c68b161d88d3cd Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:31.363621 2100790 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key ...
	I0916 10:51:31.363636 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key: {Name:mkf7a053e687a2a442d5973b40203c0ed8dfdc9d Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:31.363738 2100790 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.a444410f
	I0916 10:51:31.363757 2100790 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.a444410f with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.254]
	I0916 10:51:31.666932 2100790 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.a444410f ...
	I0916 10:51:31.666964 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.a444410f: {Name:mk0cd492e2d5f75656dab8dfe27ecb072c6d1c85 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:31.667156 2100790 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.a444410f ...
	I0916 10:51:31.667170 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.a444410f: {Name:mk8222bc8b5c5c51d544f22dd1e577a3ae5bcd67 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:31.667261 2100790 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.a444410f -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt
	I0916 10:51:31.667349 2100790 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.a444410f -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key
	I0916 10:51:31.667411 2100790 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key
	I0916 10:51:31.667429 2100790 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt with IP's: []
	I0916 10:51:31.952861 2100790 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt ...
	I0916 10:51:31.952896 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt: {Name:mk9a094598ad4046a76a8dc2769982f6cbd4e0ed Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:31.953079 2100790 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key ...
	I0916 10:51:31.953099 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key: {Name:mkfafb6912138d9356c7537f9892bac83afdf10f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:31.953192 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:51:31.953211 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:51:31.953223 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:51:31.953238 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:51:31.953250 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:51:31.953266 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:51:31.953277 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:51:31.953291 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:51:31.953344 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:51:31.953382 2100790 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:51:31.953390 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:51:31.953414 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:51:31.953435 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:51:31.953462 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:51:31.953510 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:51:31.953544 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:51:31.953558 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:31.953569 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:51:31.954199 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:51:31.980348 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:51:32.008722 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:51:32.036840 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:51:32.062567 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:51:32.090284 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:51:32.115303 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:51:32.140697 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 10:51:32.166708 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:51:32.192277 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:51:32.217104 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:51:32.242554 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:51:32.262260 2100790 ssh_runner.go:195] Run: openssl version
	I0916 10:51:32.268152 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:51:32.278101 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:51:32.281770 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:51:32.281847 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:51:32.289061 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:51:32.298720 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:51:32.308514 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:32.312223 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:32.312294 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:32.319494 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:51:32.335867 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:51:32.346930 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:51:32.351420 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:51:32.351499 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:51:32.359680 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:51:32.370826 2100790 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:51:32.375023 2100790 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:51:32.375081 2100790 kubeadm.go:392] StartCluster: {Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServe
rNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false C
ustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:32.375171 2100790 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:51:32.375234 2100790 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:51:32.420260 2100790 cri.go:89] found id: ""
	I0916 10:51:32.420343 2100790 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:51:32.429525 2100790 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:51:32.438854 2100790 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 10:51:32.438927 2100790 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:51:32.447982 2100790 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 10:51:32.448007 2100790 kubeadm.go:157] found existing configuration files:
	
	I0916 10:51:32.448080 2100790 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 10:51:32.457032 2100790 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 10:51:32.457127 2100790 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 10:51:32.465953 2100790 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 10:51:32.475625 2100790 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 10:51:32.475726 2100790 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 10:51:32.485025 2100790 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 10:51:32.494669 2100790 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 10:51:32.494817 2100790 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:51:32.503811 2100790 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 10:51:32.513033 2100790 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 10:51:32.513125 2100790 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:51:32.522212 2100790 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 10:51:32.565920 2100790 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 10:51:32.566041 2100790 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 10:51:32.583353 2100790 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 10:51:32.583470 2100790 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 10:51:32.583532 2100790 kubeadm.go:310] OS: Linux
	I0916 10:51:32.583620 2100790 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 10:51:32.583696 2100790 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 10:51:32.583770 2100790 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 10:51:32.583836 2100790 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 10:51:32.583907 2100790 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 10:51:32.583973 2100790 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 10:51:32.584041 2100790 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 10:51:32.584114 2100790 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 10:51:32.584188 2100790 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 10:51:32.642643 2100790 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 10:51:32.642819 2100790 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 10:51:32.642953 2100790 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 10:51:32.650061 2100790 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 10:51:32.654287 2100790 out.go:235]   - Generating certificates and keys ...
	I0916 10:51:32.654479 2100790 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 10:51:32.654589 2100790 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 10:51:33.003688 2100790 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 10:51:33.229605 2100790 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 10:51:33.501301 2100790 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 10:51:34.304784 2100790 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 10:51:34.784062 2100790 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 10:51:34.784276 2100790 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [ha-234759 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:51:35.165100 2100790 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 10:51:35.165490 2100790 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [ha-234759 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:51:35.346008 2100790 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 10:51:35.757653 2100790 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 10:51:35.965344 2100790 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 10:51:35.965628 2100790 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 10:51:36.123196 2100790 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 10:51:36.420436 2100790 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 10:51:36.592158 2100790 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 10:51:37.128675 2100790 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 10:51:37.485995 2100790 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 10:51:37.486713 2100790 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 10:51:37.489599 2100790 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 10:51:37.491884 2100790 out.go:235]   - Booting up control plane ...
	I0916 10:51:37.491996 2100790 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 10:51:37.492073 2100790 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 10:51:37.492682 2100790 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 10:51:37.518113 2100790 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 10:51:37.526950 2100790 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 10:51:37.527211 2100790 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 10:51:37.629200 2100790 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 10:51:37.629328 2100790 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 10:51:39.131106 2100790 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.501922222s
	I0916 10:51:39.131201 2100790 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 10:51:45.633042 2100790 kubeadm.go:310] [api-check] The API server is healthy after 6.501922883s
	I0916 10:51:45.655776 2100790 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 10:51:45.671991 2100790 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 10:51:45.701284 2100790 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 10:51:45.701481 2100790 kubeadm.go:310] [mark-control-plane] Marking the node ha-234759 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 10:51:45.716091 2100790 kubeadm.go:310] [bootstrap-token] Using token: bovj21.slnegqx7eegl94yl
	I0916 10:51:45.718447 2100790 out.go:235]   - Configuring RBAC rules ...
	I0916 10:51:45.718576 2100790 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 10:51:45.730907 2100790 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 10:51:45.742459 2100790 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 10:51:45.747476 2100790 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 10:51:45.752869 2100790 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 10:51:45.761822 2100790 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 10:51:46.041055 2100790 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 10:51:46.469705 2100790 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 10:51:47.040170 2100790 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 10:51:47.041362 2100790 kubeadm.go:310] 
	I0916 10:51:47.041441 2100790 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 10:51:47.041453 2100790 kubeadm.go:310] 
	I0916 10:51:47.041531 2100790 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 10:51:47.041540 2100790 kubeadm.go:310] 
	I0916 10:51:47.041565 2100790 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 10:51:47.041628 2100790 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 10:51:47.041684 2100790 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 10:51:47.041693 2100790 kubeadm.go:310] 
	I0916 10:51:47.041747 2100790 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 10:51:47.041755 2100790 kubeadm.go:310] 
	I0916 10:51:47.041803 2100790 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 10:51:47.041811 2100790 kubeadm.go:310] 
	I0916 10:51:47.041863 2100790 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 10:51:47.041942 2100790 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 10:51:47.042015 2100790 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 10:51:47.042023 2100790 kubeadm.go:310] 
	I0916 10:51:47.042114 2100790 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 10:51:47.042195 2100790 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 10:51:47.042204 2100790 kubeadm.go:310] 
	I0916 10:51:47.042288 2100790 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token bovj21.slnegqx7eegl94yl \
	I0916 10:51:47.042399 2100790 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 10:51:47.042425 2100790 kubeadm.go:310] 	--control-plane 
	I0916 10:51:47.042435 2100790 kubeadm.go:310] 
	I0916 10:51:47.042520 2100790 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 10:51:47.042529 2100790 kubeadm.go:310] 
	I0916 10:51:47.042612 2100790 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token bovj21.slnegqx7eegl94yl \
	I0916 10:51:47.042749 2100790 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 10:51:47.047630 2100790 kubeadm.go:310] W0916 10:51:32.559735    1055 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:51:47.047960 2100790 kubeadm.go:310] W0916 10:51:32.561183    1055 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:51:47.048238 2100790 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 10:51:47.048384 2100790 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 10:51:47.048417 2100790 cni.go:84] Creating CNI manager for ""
	I0916 10:51:47.048425 2100790 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 10:51:47.050384 2100790 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:51:47.051909 2100790 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:51:47.056111 2100790 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:51:47.056132 2100790 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:51:47.078806 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 10:51:47.373387 2100790 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:51:47.373527 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:47.373625 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-234759 minikube.k8s.io/updated_at=2024_09_16T10_51_47_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=ha-234759 minikube.k8s.io/primary=true
	I0916 10:51:47.590409 2100790 ops.go:34] apiserver oom_adj: -16
	I0916 10:51:47.590562 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:48.091656 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:48.591260 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:49.090712 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:49.590628 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:50.090756 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:50.590704 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:51.091550 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:51.200211 2100790 kubeadm.go:1113] duration metric: took 3.826732027s to wait for elevateKubeSystemPrivileges
	I0916 10:51:51.200246 2100790 kubeadm.go:394] duration metric: took 18.82517119s to StartCluster
	I0916 10:51:51.200264 2100790 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:51.200331 2100790 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:51:51.201094 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:51.201326 2100790 start.go:233] HA (multi-control plane) cluster: will skip waiting for primary control-plane node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:51:51.201357 2100790 start.go:241] waiting for startup goroutines ...
	I0916 10:51:51.201373 2100790 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:51:51.201437 2100790 addons.go:69] Setting storage-provisioner=true in profile "ha-234759"
	I0916 10:51:51.201456 2100790 addons.go:234] Setting addon storage-provisioner=true in "ha-234759"
	I0916 10:51:51.201484 2100790 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:51:51.202013 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:51:51.202334 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 10:51:51.202685 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:51:51.202750 2100790 addons.go:69] Setting default-storageclass=true in profile "ha-234759"
	I0916 10:51:51.202767 2100790 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "ha-234759"
	I0916 10:51:51.203045 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:51:51.232784 2100790 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:51:51.233068 2100790 kapi.go:59] client config for ha-234759: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]strin
g(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:51:51.233563 2100790 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 10:51:51.233711 2100790 addons.go:234] Setting addon default-storageclass=true in "ha-234759"
	I0916 10:51:51.233747 2100790 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:51:51.234206 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:51:51.245576 2100790 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:51:51.247765 2100790 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:51:51.247791 2100790 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:51:51.247857 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:51.276395 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:51.288216 2100790 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:51:51.288242 2100790 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:51:51.288307 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:51.323194 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:51.442942 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 10:51:51.578745 2100790 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:51:51.586010 2100790 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:51:52.029810 2100790 start.go:971] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
	I0916 10:51:52.029991 2100790 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 10:51:52.030051 2100790 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 10:51:52.030174 2100790 round_trippers.go:463] GET https://192.168.49.254:8443/apis/storage.k8s.io/v1/storageclasses
	I0916 10:51:52.030199 2100790 round_trippers.go:469] Request Headers:
	I0916 10:51:52.030220 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:51:52.030237 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:51:52.045481 2100790 round_trippers.go:574] Response Status: 200 OK in 15 milliseconds
	I0916 10:51:52.046320 2100790 round_trippers.go:463] PUT https://192.168.49.254:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0916 10:51:52.046381 2100790 round_trippers.go:469] Request Headers:
	I0916 10:51:52.046403 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:51:52.046419 2100790 round_trippers.go:473]     Content-Type: application/json
	I0916 10:51:52.046454 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:51:52.049300 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:51:52.334786 2100790 out.go:177] * Enabled addons: default-storageclass, storage-provisioner
	I0916 10:51:52.337300 2100790 addons.go:510] duration metric: took 1.135922915s for enable addons: enabled=[default-storageclass storage-provisioner]
	I0916 10:51:52.337397 2100790 start.go:246] waiting for cluster config update ...
	I0916 10:51:52.337434 2100790 start.go:255] writing updated cluster config ...
	I0916 10:51:52.339645 2100790 out.go:201] 
	I0916 10:51:52.342478 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:51:52.342628 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:51:52.345664 2100790 out.go:177] * Starting "ha-234759-m02" control-plane node in "ha-234759" cluster
	I0916 10:51:52.348517 2100790 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:51:52.351782 2100790 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:51:52.355357 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:51:52.355473 2100790 cache.go:56] Caching tarball of preloaded images
	I0916 10:51:52.355439 2100790 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:51:52.355802 2100790 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:51:52.355838 2100790 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:51:52.355993 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	W0916 10:51:52.384717 2100790 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:51:52.384735 2100790 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:51:52.384821 2100790 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:51:52.384838 2100790 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:51:52.384844 2100790 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:51:52.384852 2100790 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:51:52.384857 2100790 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:51:52.386386 2100790 image.go:273] response: 
	I0916 10:51:52.505271 2100790 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:51:52.505311 2100790 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:51:52.505343 2100790 start.go:360] acquireMachinesLock for ha-234759-m02: {Name:mk8d038416b8f502330f7520e1c7f720d49da587 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:51:52.505459 2100790 start.go:364] duration metric: took 94.506µs to acquireMachinesLock for "ha-234759-m02"
	I0916 10:51:52.505489 2100790 start.go:93] Provisioning new machine with config: &{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9
PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:51:52.505574 2100790 start.go:125] createHost starting for "m02" (driver="docker")
	I0916 10:51:52.508620 2100790 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 10:51:52.508741 2100790 start.go:159] libmachine.API.Create for "ha-234759" (driver="docker")
	I0916 10:51:52.508770 2100790 client.go:168] LocalClient.Create starting
	I0916 10:51:52.508832 2100790 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 10:51:52.508865 2100790 main.go:141] libmachine: Decoding PEM data...
	I0916 10:51:52.508881 2100790 main.go:141] libmachine: Parsing certificate...
	I0916 10:51:52.508937 2100790 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 10:51:52.508954 2100790 main.go:141] libmachine: Decoding PEM data...
	I0916 10:51:52.508963 2100790 main.go:141] libmachine: Parsing certificate...
	I0916 10:51:52.509203 2100790 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:51:52.532873 2100790 network_create.go:77] Found existing network {name:ha-234759 subnet:0x40016d38f0 gateway:[0 0 0 0 0 0 0 0 0 0 255 255 192 168 49 1] mtu:1500}
	I0916 10:51:52.532927 2100790 kic.go:121] calculated static IP "192.168.49.3" for the "ha-234759-m02" container
	I0916 10:51:52.533072 2100790 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:51:52.554402 2100790 cli_runner.go:164] Run: docker volume create ha-234759-m02 --label name.minikube.sigs.k8s.io=ha-234759-m02 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:51:52.580882 2100790 oci.go:103] Successfully created a docker volume ha-234759-m02
	I0916 10:51:52.580970 2100790 cli_runner.go:164] Run: docker run --rm --name ha-234759-m02-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-234759-m02 --entrypoint /usr/bin/test -v ha-234759-m02:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:51:53.343000 2100790 oci.go:107] Successfully prepared a docker volume ha-234759-m02
	I0916 10:51:53.343050 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:51:53.343072 2100790 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:51:53.343147 2100790 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v ha-234759-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:51:57.361868 2100790 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v ha-234759-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.018676781s)
	I0916 10:51:57.361899 2100790 kic.go:203] duration metric: took 4.018823003s to extract preloaded images to volume ...
	W0916 10:51:57.362036 2100790 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:51:57.362180 2100790 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:51:57.425571 2100790 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname ha-234759-m02 --name ha-234759-m02 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-234759-m02 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=ha-234759-m02 --network ha-234759 --ip 192.168.49.3 --volume ha-234759-m02:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:51:57.764960 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Running}}
	I0916 10:51:57.783068 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Status}}
	I0916 10:51:57.811190 2100790 cli_runner.go:164] Run: docker exec ha-234759-m02 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:51:57.881077 2100790 oci.go:144] the created container "ha-234759-m02" has a running status.
	I0916 10:51:57.881102 2100790 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa...
	I0916 10:51:58.446855 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 10:51:58.446898 2100790 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:51:58.472337 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Status}}
	I0916 10:51:58.494908 2100790 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:51:58.494928 2100790 kic_runner.go:114] Args: [docker exec --privileged ha-234759-m02 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:51:58.602397 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Status}}
	I0916 10:51:58.622640 2100790 machine.go:93] provisionDockerMachine start ...
	I0916 10:51:58.622764 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:51:58.646223 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:58.646502 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40602 <nil> <nil>}
	I0916 10:51:58.646518 2100790 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:51:58.798362 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m02
	
	I0916 10:51:58.798388 2100790 ubuntu.go:169] provisioning hostname "ha-234759-m02"
	I0916 10:51:58.798460 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:51:58.819627 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:58.819965 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40602 <nil> <nil>}
	I0916 10:51:58.820003 2100790 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-234759-m02 && echo "ha-234759-m02" | sudo tee /etc/hostname
	I0916 10:51:58.985423 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m02
	
	I0916 10:51:58.985503 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:51:59.009340 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:59.009597 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40602 <nil> <nil>}
	I0916 10:51:59.009621 2100790 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-234759-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-234759-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-234759-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:51:59.147069 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:51:59.147103 2100790 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:51:59.147119 2100790 ubuntu.go:177] setting up certificates
	I0916 10:51:59.147128 2100790 provision.go:84] configureAuth start
	I0916 10:51:59.147189 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m02
	I0916 10:51:59.163758 2100790 provision.go:143] copyHostCerts
	I0916 10:51:59.163806 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:51:59.164072 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:51:59.164087 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:51:59.164170 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:51:59.164252 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:51:59.164269 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:51:59.164274 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:51:59.164298 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:51:59.164538 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:51:59.164570 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:51:59.164576 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:51:59.164618 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:51:59.164691 2100790 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.ha-234759-m02 san=[127.0.0.1 192.168.49.3 ha-234759-m02 localhost minikube]
	I0916 10:51:59.800624 2100790 provision.go:177] copyRemoteCerts
	I0916 10:51:59.800701 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:51:59.800744 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:51:59.817936 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40602 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:51:59.915681 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:51:59.915747 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:51:59.940507 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:51:59.940571 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:51:59.964933 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:51:59.964996 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:51:59.990554 2100790 provision.go:87] duration metric: took 843.41239ms to configureAuth
	I0916 10:51:59.990623 2100790 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:51:59.990884 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:51:59.990900 2100790 machine.go:96] duration metric: took 1.368237553s to provisionDockerMachine
	I0916 10:51:59.990910 2100790 client.go:171] duration metric: took 7.482131873s to LocalClient.Create
	I0916 10:51:59.990936 2100790 start.go:167] duration metric: took 7.482195824s to libmachine.API.Create "ha-234759"
	I0916 10:51:59.990949 2100790 start.go:293] postStartSetup for "ha-234759-m02" (driver="docker")
	I0916 10:51:59.990959 2100790 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:51:59.991025 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:51:59.991073 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:52:00.028084 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40602 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:52:00.230114 2100790 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:52:00.263730 2100790 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:52:00.263833 2100790 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:52:00.263861 2100790 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:52:00.263902 2100790 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:52:00.263938 2100790 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:52:00.271489 2100790 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:52:00.272299 2100790 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:52:00.272486 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:52:00.275328 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:52:00.301426 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:52:00.377609 2100790 start.go:296] duration metric: took 386.642629ms for postStartSetup
	I0916 10:52:00.378172 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m02
	I0916 10:52:00.419790 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:52:00.420162 2100790 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:52:00.420229 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:52:00.460549 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40602 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:52:00.569639 2100790 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:52:00.583439 2100790 start.go:128] duration metric: took 8.077829888s to createHost
	I0916 10:52:00.583526 2100790 start.go:83] releasing machines lock for "ha-234759-m02", held for 8.078053304s
	I0916 10:52:00.583641 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m02
	I0916 10:52:00.606976 2100790 out.go:177] * Found network options:
	I0916 10:52:00.611187 2100790 out.go:177]   - NO_PROXY=192.168.49.2
	W0916 10:52:00.613159 2100790 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:52:00.613226 2100790 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:52:00.613309 2100790 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:52:00.613366 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:52:00.613715 2100790 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:52:00.613795 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:52:00.633041 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40602 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:52:00.640527 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40602 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:52:00.727777 2100790 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:52:00.861679 2100790 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:52:00.861766 2100790 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:52:00.891004 2100790 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:52:00.891082 2100790 start.go:495] detecting cgroup driver to use...
	I0916 10:52:00.891123 2100790 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:52:00.891189 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:52:00.904160 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:52:00.915856 2100790 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:52:00.915927 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:52:00.930988 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:52:00.947340 2100790 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:52:01.033056 2100790 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:52:01.124369 2100790 docker.go:233] disabling docker service ...
	I0916 10:52:01.124439 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:52:01.151205 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:52:01.164618 2100790 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:52:01.259473 2100790 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:52:01.349046 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:52:01.361334 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:52:01.382234 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:52:01.393849 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:52:01.405278 2100790 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:52:01.405400 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:52:01.416463 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:52:01.427693 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:52:01.438592 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:52:01.449156 2100790 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:52:01.459129 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:52:01.470471 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:52:01.481928 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:52:01.495717 2100790 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:52:01.505510 2100790 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:52:01.514899 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:01.602912 2100790 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:52:01.744817 2100790 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:52:01.744897 2100790 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:52:01.749031 2100790 start.go:563] Will wait 60s for crictl version
	I0916 10:52:01.749101 2100790 ssh_runner.go:195] Run: which crictl
	I0916 10:52:01.752795 2100790 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:52:01.792221 2100790 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:52:01.792298 2100790 ssh_runner.go:195] Run: containerd --version
	I0916 10:52:01.814556 2100790 ssh_runner.go:195] Run: containerd --version
	I0916 10:52:01.839285 2100790 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:52:01.840833 2100790 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:52:01.842864 2100790 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:52:01.858110 2100790 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:52:01.861691 2100790 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:52:01.872718 2100790 mustload.go:65] Loading cluster: ha-234759
	I0916 10:52:01.872937 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:52:01.873219 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:52:01.889828 2100790 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:52:01.890129 2100790 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759 for IP: 192.168.49.3
	I0916 10:52:01.890138 2100790 certs.go:194] generating shared ca certs ...
	I0916 10:52:01.890152 2100790 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:01.890272 2100790 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:52:01.890308 2100790 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:52:01.890315 2100790 certs.go:256] generating profile certs ...
	I0916 10:52:01.890393 2100790 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key
	I0916 10:52:01.890420 2100790 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.991f748e
	I0916 10:52:01.890433 2100790 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.991f748e with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.3 192.168.49.254]
	I0916 10:52:02.819883 2100790 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.991f748e ...
	I0916 10:52:02.819920 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.991f748e: {Name:mk59c37e23909c525e021174c1d94dbb826982fa Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:02.820129 2100790 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.991f748e ...
	I0916 10:52:02.820145 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.991f748e: {Name:mkeca968d2ea721cee90ea9eb97a24b334102416 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:02.820235 2100790 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.991f748e -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt
	I0916 10:52:02.820375 2100790 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.991f748e -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key
	I0916 10:52:02.820519 2100790 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key
	I0916 10:52:02.820538 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:52:02.820554 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:52:02.820570 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:52:02.820587 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:52:02.820603 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:52:02.820618 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:52:02.820629 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:52:02.820643 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:52:02.820695 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:52:02.820728 2100790 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:52:02.820740 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:52:02.820764 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:52:02.820794 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:52:02.820821 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:52:02.820866 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:52:02.820899 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:52:02.820934 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:52:02.820950 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:02.821015 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:52:02.840635 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:52:02.951102 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 10:52:02.955103 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 10:52:02.968406 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 10:52:02.973140 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1679 bytes)
	I0916 10:52:02.986161 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 10:52:02.989843 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 10:52:03.004994 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 10:52:03.011769 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1679 bytes)
	I0916 10:52:03.026584 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 10:52:03.030915 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 10:52:03.045806 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 10:52:03.049980 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1675 bytes)
	I0916 10:52:03.064058 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:52:03.091835 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:52:03.120876 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:52:03.150236 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:52:03.178864 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1436 bytes)
	I0916 10:52:03.205094 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:52:03.231522 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:52:03.257524 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 10:52:03.283880 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:52:03.310585 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:52:03.336527 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:52:03.361752 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 10:52:03.381652 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1679 bytes)
	I0916 10:52:03.400412 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 10:52:03.425598 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1679 bytes)
	I0916 10:52:03.450347 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 10:52:03.477087 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1675 bytes)
	I0916 10:52:03.501921 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0916 10:52:03.526309 2100790 ssh_runner.go:195] Run: openssl version
	I0916 10:52:03.533275 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:52:03.545737 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:03.550223 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:03.550366 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:03.558081 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:52:03.572153 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:52:03.591282 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:52:03.595886 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:52:03.595959 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:52:03.604395 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:52:03.616910 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:52:03.630330 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:52:03.634799 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:52:03.634880 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:52:03.650545 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:52:03.660697 2100790 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:52:03.665539 2100790 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:52:03.665590 2100790 kubeadm.go:934] updating node {m02 192.168.49.3 8443 v1.31.1 containerd true true} ...
	I0916 10:52:03.665681 2100790 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-234759-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:52:03.665709 2100790 kube-vip.go:115] generating kube-vip config ...
	I0916 10:52:03.665757 2100790 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:52:03.682968 2100790 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:52:03.683042 2100790 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
	I0916 10:52:03.683106 2100790 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:52:03.694685 2100790 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:52:03.694760 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 10:52:03.708316 2100790 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:52:03.731363 2100790 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:52:03.752346 2100790 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:52:03.773613 2100790 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:52:03.777406 2100790 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:52:03.789030 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:03.883425 2100790 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:52:03.906645 2100790 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:52:03.907112 2100790 start.go:317] joinCluster: &{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerN
ames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2
000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:52:03.907240 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0916 10:52:03.907325 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:52:03.927463 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:52:04.101106 2100790 start.go:343] trying to join control-plane node "m02" to cluster: &{Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:52:04.101156 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token me3nxf.6d1pg723ya0vtuep --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=ha-234759-m02 --control-plane --apiserver-advertise-address=192.168.49.3 --apiserver-bind-port=8443"
	I0916 10:52:13.481760 2100790 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token me3nxf.6d1pg723ya0vtuep --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=ha-234759-m02 --control-plane --apiserver-advertise-address=192.168.49.3 --apiserver-bind-port=8443": (9.380581228s)
	I0916 10:52:13.481795 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0916 10:52:13.953011 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-234759-m02 minikube.k8s.io/updated_at=2024_09_16T10_52_13_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=ha-234759 minikube.k8s.io/primary=false
	I0916 10:52:14.131350 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig taint nodes ha-234759-m02 node-role.kubernetes.io/control-plane:NoSchedule-
	I0916 10:52:14.297580 2100790 start.go:319] duration metric: took 10.390468491s to joinCluster
	I0916 10:52:14.297636 2100790 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:52:14.298016 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:52:14.300678 2100790 out.go:177] * Verifying Kubernetes components...
	I0916 10:52:14.302524 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:14.540418 2100790 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:52:14.560947 2100790 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:52:14.561209 2100790 kapi.go:59] client config for ha-234759: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]strin
g(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:52:14.561286 2100790 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:52:14.561506 2100790 node_ready.go:35] waiting up to 6m0s for node "ha-234759-m02" to be "Ready" ...
	I0916 10:52:14.561587 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:14.561593 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.561602 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.561606 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.584850 2100790 round_trippers.go:574] Response Status: 200 OK in 23 milliseconds
	I0916 10:52:14.586211 2100790 node_ready.go:49] node "ha-234759-m02" has status "Ready":"True"
	I0916 10:52:14.586235 2100790 node_ready.go:38] duration metric: took 24.71438ms for node "ha-234759-m02" to be "Ready" ...
	I0916 10:52:14.586246 2100790 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:52:14.586340 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:52:14.586348 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.586356 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.586360 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.592639 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:52:14.601938 2100790 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.602139 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:52:14.602166 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.602187 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.602205 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.611702 2100790 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:52:14.613067 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:14.613128 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.613152 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.613171 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.617455 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:14.618049 2100790 pod_ready.go:93] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:14.618103 2100790 pod_ready.go:82] duration metric: took 16.059189ms for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.618132 2100790 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.618226 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vqj8q
	I0916 10:52:14.618250 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.618270 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.618289 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.624350 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:52:14.625435 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:14.625491 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.625518 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.625535 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.630962 2100790 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:52:14.631768 2100790 pod_ready.go:93] pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:14.631819 2100790 pod_ready.go:82] duration metric: took 13.666606ms for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.631850 2100790 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.631951 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759
	I0916 10:52:14.631977 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.631999 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.632017 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.660011 2100790 round_trippers.go:574] Response Status: 200 OK in 27 milliseconds
	I0916 10:52:14.661121 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:14.661188 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.661209 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.661227 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.665606 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:14.666668 2100790 pod_ready.go:93] pod "etcd-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:14.666737 2100790 pod_ready.go:82] duration metric: took 34.865874ms for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.666763 2100790 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.666864 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:14.666889 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.666909 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.666923 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.677483 2100790 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:52:14.678623 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:14.678709 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.678735 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.678753 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.694229 2100790 round_trippers.go:574] Response Status: 200 OK in 15 milliseconds
	I0916 10:52:15.167848 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:15.167935 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:15.167958 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:15.167979 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:15.171052 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:15.172278 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:15.172346 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:15.172370 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:15.172389 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:15.179672 2100790 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:52:15.667582 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:15.667656 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:15.667678 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:15.667697 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:15.670893 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:15.671979 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:15.672035 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:15.672058 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:15.672073 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:15.674729 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:16.167383 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:16.167461 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:16.167485 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:16.167502 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:16.171444 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:16.172570 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:16.172631 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:16.172654 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:16.172673 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:16.179641 2100790 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:52:16.667381 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:16.667451 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:16.667474 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:16.667495 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:16.670302 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:16.671585 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:16.671648 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:16.671671 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:16.671689 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:16.674182 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:16.675065 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:17.167803 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:17.167878 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:17.167900 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:17.167917 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:17.170713 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:17.171795 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:17.171854 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:17.171878 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:17.171897 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:17.175219 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:17.667820 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:17.667900 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:17.667922 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:17.667942 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:17.670692 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:17.671566 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:17.671630 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:17.671652 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:17.671684 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:17.673987 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:18.167956 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:18.167981 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:18.167991 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:18.167997 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:18.171129 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:18.171904 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:18.171925 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:18.171935 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:18.171941 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:18.174507 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:18.667557 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:18.667635 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:18.667658 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:18.667676 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:18.670277 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:18.670953 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:18.670967 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:18.670976 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:18.670981 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:18.673323 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:19.166997 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:19.167025 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:19.167034 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:19.167039 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:19.169857 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:19.170558 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:19.170579 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:19.170589 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:19.170595 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:19.172990 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:19.173467 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:19.666983 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:19.667009 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:19.667018 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:19.667023 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:19.669930 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:19.670553 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:19.670574 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:19.670583 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:19.670587 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:19.672919 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:20.168223 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:20.168254 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:20.168264 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:20.168271 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:20.171922 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:20.172981 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:20.173007 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:20.173017 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:20.173051 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:20.175974 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:20.667867 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:20.667891 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:20.667900 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:20.667904 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:20.670793 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:20.671604 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:20.671620 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:20.671630 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:20.671636 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:20.673952 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:21.166989 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:21.167014 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:21.167024 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:21.167031 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:21.169947 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:21.170947 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:21.170969 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:21.170979 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:21.170985 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:21.174293 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:21.174844 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:21.667832 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:21.667863 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:21.667873 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:21.667878 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:21.671093 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:21.671706 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:21.671729 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:21.671739 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:21.671746 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:21.674363 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:22.167512 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:22.167538 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:22.167548 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:22.167551 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:22.171401 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:22.172139 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:22.172163 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:22.172172 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:22.172176 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:22.179843 2100790 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:52:22.667683 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:22.667707 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:22.667717 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:22.667722 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:22.670670 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:22.671547 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:22.671568 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:22.671579 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:22.671584 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:22.674452 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:23.167992 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:23.168017 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:23.168025 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:23.168029 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:23.170975 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:23.172096 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:23.172118 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:23.172129 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:23.172134 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:23.174972 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:23.175476 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:23.667184 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:23.667209 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:23.667220 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:23.667225 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:23.670019 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:23.671058 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:23.671081 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:23.671090 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:23.671096 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:23.673575 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:24.167919 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:24.167946 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:24.167956 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:24.167961 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:24.170964 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:24.171998 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:24.172026 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:24.172036 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:24.172042 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:24.174538 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:24.667100 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:24.667131 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:24.667144 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:24.667149 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:24.669964 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:24.670872 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:24.670899 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:24.670909 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:24.670914 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:24.673553 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:25.167699 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:25.167724 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:25.167735 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:25.167741 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:25.170797 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:25.171606 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:25.171624 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:25.171635 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:25.171640 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:25.174513 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:25.667777 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:25.667799 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:25.667809 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:25.667813 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:25.670520 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:25.671210 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:25.671229 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:25.671238 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:25.671242 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:25.673671 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:25.674216 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:26.167934 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:26.167962 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:26.167972 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:26.167977 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:26.171870 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:26.172591 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:26.172614 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:26.172624 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:26.172629 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:26.175357 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:26.667604 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:26.667639 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:26.667724 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:26.667733 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:26.670968 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:26.672105 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:26.672127 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:26.672139 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:26.672143 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:26.675223 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:27.167599 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:27.167690 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:27.167709 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:27.167714 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:27.170579 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:27.171549 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:27.171576 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:27.171586 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:27.171592 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:27.175255 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:27.667053 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:27.667076 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:27.667085 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:27.667089 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:27.669935 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:27.670935 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:27.670952 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:27.670961 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:27.670966 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:27.673543 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:27.674283 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:28.167668 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:28.167695 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:28.167705 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:28.167710 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:28.171000 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:28.172051 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:28.172076 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:28.172088 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:28.172099 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:28.174803 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:28.667481 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:28.667503 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:28.667512 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:28.667516 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:28.670563 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:28.671256 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:28.671275 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:28.671284 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:28.671289 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:28.673782 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:29.167691 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:29.167715 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:29.167725 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:29.167730 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:29.170715 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:29.171714 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:29.171731 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:29.171740 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:29.171744 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:29.174250 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:29.667017 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:29.667065 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:29.667075 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:29.667080 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:29.669982 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:29.670811 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:29.670829 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:29.670842 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:29.670846 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:29.673571 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.167798 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:30.167829 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.167840 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.167845 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.171429 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:30.172187 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:30.172209 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.172231 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.172236 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.175751 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:30.176458 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:30.667785 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:30.667809 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.667818 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.667824 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.670772 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.671464 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:30.671487 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.671497 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.671503 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.674188 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.674744 2100790 pod_ready.go:93] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:30.674764 2100790 pod_ready.go:82] duration metric: took 16.007974361s for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.674781 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.674846 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759
	I0916 10:52:30.674857 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.674865 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.674869 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.677445 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.678126 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:30.678146 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.678157 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.678165 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.680721 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.681352 2100790 pod_ready.go:93] pod "kube-apiserver-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:30.681374 2100790 pod_ready.go:82] duration metric: took 6.583737ms for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.681387 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.681496 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:52:30.681507 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.681515 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.681520 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.684263 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.685052 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:30.685069 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.685079 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.685083 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.687823 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.688563 2100790 pod_ready.go:93] pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:30.688597 2100790 pod_ready.go:82] duration metric: took 7.201514ms for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.688615 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.688706 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:52:30.688716 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.688724 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.688728 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.691668 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.692464 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:30.692482 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.692492 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.692497 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.695275 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.695848 2100790 pod_ready.go:93] pod "kube-controller-manager-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:30.695867 2100790 pod_ready.go:82] duration metric: took 7.234695ms for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.695879 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.695941 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:52:30.695950 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.695959 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.695965 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.698602 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.699792 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:30.699814 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.699824 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.699829 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.702393 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:31.196840 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:52:31.196873 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:31.196883 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.196887 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.199973 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:31.200902 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:31.200924 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:31.200935 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.200939 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.203645 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:31.204177 2100790 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:31.204198 2100790 pod_ready.go:82] duration metric: took 508.31135ms for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:31.204210 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:31.268182 2100790 request.go:632] Waited for 63.903624ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:52:31.268295 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:52:31.268308 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:31.268317 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.268321 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.271468 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:31.468456 2100790 request.go:632] Waited for 195.984535ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:31.468530 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:31.468567 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:31.468581 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.468586 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.471707 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:31.472503 2100790 pod_ready.go:93] pod "kube-proxy-f4jm2" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:31.472525 2100790 pod_ready.go:82] duration metric: took 268.307873ms for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:31.472537 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:31.667967 2100790 request.go:632] Waited for 195.308675ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:52:31.668056 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:52:31.668073 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:31.668083 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.668091 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.671256 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:31.868481 2100790 request.go:632] Waited for 196.381744ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:31.868541 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:31.868546 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:31.868556 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.868562 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.871496 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:31.872041 2100790 pod_ready.go:93] pod "kube-proxy-gwdl4" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:31.872065 2100790 pod_ready.go:82] duration metric: took 399.491207ms for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:31.872077 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:32.068578 2100790 request.go:632] Waited for 196.397932ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:52:32.068661 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:52:32.068674 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:32.068682 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.068687 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.072060 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:32.267941 2100790 request.go:632] Waited for 195.260306ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:32.268053 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:32.268063 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:32.268080 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.268084 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.271031 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:32.271654 2100790 pod_ready.go:93] pod "kube-scheduler-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:32.271675 2100790 pod_ready.go:82] duration metric: took 399.575473ms for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:32.271687 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:32.468750 2100790 request.go:632] Waited for 196.979688ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:52:32.468841 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:52:32.468855 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:32.468865 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.468871 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.471963 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:32.667847 2100790 request.go:632] Waited for 195.248598ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:32.667900 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:32.667922 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:32.667934 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.667938 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.670669 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:32.671452 2100790 pod_ready.go:93] pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:32.671476 2100790 pod_ready.go:82] duration metric: took 399.781806ms for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:32.671490 2100790 pod_ready.go:39] duration metric: took 18.085232464s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:52:32.671504 2100790 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:52:32.671578 2100790 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:52:32.682804 2100790 api_server.go:72] duration metric: took 18.385130201s to wait for apiserver process to appear ...
	I0916 10:52:32.682827 2100790 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:52:32.682865 2100790 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:52:32.691984 2100790 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:52:32.692057 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 10:52:32.692067 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:32.692076 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.692083 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.692953 2100790 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 10:52:32.693059 2100790 api_server.go:141] control plane version: v1.31.1
	I0916 10:52:32.693075 2100790 api_server.go:131] duration metric: took 10.240715ms to wait for apiserver health ...
	I0916 10:52:32.693083 2100790 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:52:32.868465 2100790 request.go:632] Waited for 175.304271ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:52:32.868523 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:52:32.868529 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:32.868538 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.868546 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.873423 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:32.879501 2100790 system_pods.go:59] 17 kube-system pods found
	I0916 10:52:32.879541 2100790 system_pods.go:61] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:52:32.879548 2100790 system_pods.go:61] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:52:32.879553 2100790 system_pods.go:61] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:52:32.879559 2100790 system_pods.go:61] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:52:32.879563 2100790 system_pods.go:61] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:52:32.879568 2100790 system_pods.go:61] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running
	I0916 10:52:32.879572 2100790 system_pods.go:61] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:52:32.879576 2100790 system_pods.go:61] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:52:32.879581 2100790 system_pods.go:61] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:52:32.879593 2100790 system_pods.go:61] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running
	I0916 10:52:32.879599 2100790 system_pods.go:61] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running
	I0916 10:52:32.879607 2100790 system_pods.go:61] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:52:32.879611 2100790 system_pods.go:61] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:52:32.879614 2100790 system_pods.go:61] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:52:32.879618 2100790 system_pods.go:61] "kube-vip-ha-234759" [41a1ec5f-e3ae-4b06-9a3f-f76b54f7d24e] Running
	I0916 10:52:32.879622 2100790 system_pods.go:61] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:52:32.879625 2100790 system_pods.go:61] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:52:32.879633 2100790 system_pods.go:74] duration metric: took 186.543914ms to wait for pod list to return data ...
	I0916 10:52:32.879643 2100790 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:52:33.068049 2100790 request.go:632] Waited for 188.323464ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:52:33.068141 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:52:33.068152 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:33.068162 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:33.068172 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:33.071458 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:33.071767 2100790 default_sa.go:45] found service account: "default"
	I0916 10:52:33.071790 2100790 default_sa.go:55] duration metric: took 192.139596ms for default service account to be created ...
	I0916 10:52:33.071800 2100790 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:52:33.268106 2100790 request.go:632] Waited for 196.218709ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:52:33.268195 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:52:33.268211 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:33.268220 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:33.268226 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:33.272528 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:33.285834 2100790 system_pods.go:86] 17 kube-system pods found
	I0916 10:52:33.285880 2100790 system_pods.go:89] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:52:33.285888 2100790 system_pods.go:89] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:52:33.285894 2100790 system_pods.go:89] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:52:33.285899 2100790 system_pods.go:89] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:52:33.285904 2100790 system_pods.go:89] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:52:33.285908 2100790 system_pods.go:89] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running
	I0916 10:52:33.285912 2100790 system_pods.go:89] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:52:33.285917 2100790 system_pods.go:89] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:52:33.285922 2100790 system_pods.go:89] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:52:33.285934 2100790 system_pods.go:89] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running
	I0916 10:52:33.285938 2100790 system_pods.go:89] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running
	I0916 10:52:33.285946 2100790 system_pods.go:89] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:52:33.285950 2100790 system_pods.go:89] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:52:33.285958 2100790 system_pods.go:89] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:52:33.285962 2100790 system_pods.go:89] "kube-vip-ha-234759" [41a1ec5f-e3ae-4b06-9a3f-f76b54f7d24e] Running
	I0916 10:52:33.285966 2100790 system_pods.go:89] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:52:33.285969 2100790 system_pods.go:89] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:52:33.285978 2100790 system_pods.go:126] duration metric: took 214.173279ms to wait for k8s-apps to be running ...
	I0916 10:52:33.285988 2100790 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:52:33.286050 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:52:33.298444 2100790 system_svc.go:56] duration metric: took 12.444057ms WaitForService to wait for kubelet
	I0916 10:52:33.298535 2100790 kubeadm.go:582] duration metric: took 19.000872213s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:52:33.298573 2100790 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:52:33.467815 2100790 request.go:632] Waited for 169.134373ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:52:33.467912 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:52:33.467927 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:33.467936 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:33.467941 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:33.471310 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:33.472264 2100790 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:52:33.472297 2100790 node_conditions.go:123] node cpu capacity is 2
	I0916 10:52:33.472310 2100790 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:52:33.472315 2100790 node_conditions.go:123] node cpu capacity is 2
	I0916 10:52:33.472321 2100790 node_conditions.go:105] duration metric: took 173.737143ms to run NodePressure ...
	I0916 10:52:33.472334 2100790 start.go:241] waiting for startup goroutines ...
	I0916 10:52:33.472361 2100790 start.go:255] writing updated cluster config ...
	I0916 10:52:33.475639 2100790 out.go:201] 
	I0916 10:52:33.478649 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:52:33.478907 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:52:33.481979 2100790 out.go:177] * Starting "ha-234759-m03" control-plane node in "ha-234759" cluster
	I0916 10:52:33.484445 2100790 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:52:33.486955 2100790 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:52:33.489424 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:52:33.489443 2100790 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:52:33.489515 2100790 cache.go:56] Caching tarball of preloaded images
	I0916 10:52:33.489717 2100790 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:52:33.489746 2100790 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:52:33.489911 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	W0916 10:52:33.509372 2100790 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:52:33.509396 2100790 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:52:33.509476 2100790 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:52:33.509500 2100790 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:52:33.509508 2100790 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:52:33.509516 2100790 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:52:33.509522 2100790 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:52:33.510938 2100790 image.go:273] response: 
	I0916 10:52:33.636459 2100790 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:52:33.636501 2100790 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:52:33.636535 2100790 start.go:360] acquireMachinesLock for ha-234759-m03: {Name:mk5869e6facf3d1797569b1a88c6d42d2b487fed Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:52:33.636661 2100790 start.go:364] duration metric: took 103.466µs to acquireMachinesLock for "ha-234759-m03"
	I0916 10:52:33.636694 2100790 start.go:93] Provisioning new machine with config: &{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m03 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:fals
e kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: Sock
etVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m03 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:52:33.636830 2100790 start.go:125] createHost starting for "m03" (driver="docker")
	I0916 10:52:33.640186 2100790 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 10:52:33.640309 2100790 start.go:159] libmachine.API.Create for "ha-234759" (driver="docker")
	I0916 10:52:33.640345 2100790 client.go:168] LocalClient.Create starting
	I0916 10:52:33.640440 2100790 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 10:52:33.640482 2100790 main.go:141] libmachine: Decoding PEM data...
	I0916 10:52:33.640501 2100790 main.go:141] libmachine: Parsing certificate...
	I0916 10:52:33.640559 2100790 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 10:52:33.640581 2100790 main.go:141] libmachine: Decoding PEM data...
	I0916 10:52:33.640593 2100790 main.go:141] libmachine: Parsing certificate...
	I0916 10:52:33.640856 2100790 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:52:33.657119 2100790 network_create.go:77] Found existing network {name:ha-234759 subnet:0x4002072780 gateway:[0 0 0 0 0 0 0 0 0 0 255 255 192 168 49 1] mtu:1500}
	I0916 10:52:33.657166 2100790 kic.go:121] calculated static IP "192.168.49.4" for the "ha-234759-m03" container
	I0916 10:52:33.657247 2100790 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:52:33.675429 2100790 cli_runner.go:164] Run: docker volume create ha-234759-m03 --label name.minikube.sigs.k8s.io=ha-234759-m03 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:52:33.693469 2100790 oci.go:103] Successfully created a docker volume ha-234759-m03
	I0916 10:52:33.693560 2100790 cli_runner.go:164] Run: docker run --rm --name ha-234759-m03-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-234759-m03 --entrypoint /usr/bin/test -v ha-234759-m03:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:52:34.347834 2100790 oci.go:107] Successfully prepared a docker volume ha-234759-m03
	I0916 10:52:34.347879 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:52:34.347900 2100790 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:52:34.347981 2100790 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v ha-234759-m03:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:52:39.731323 2100790 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v ha-234759-m03:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (5.383287092s)
	I0916 10:52:39.731356 2100790 kic.go:203] duration metric: took 5.383452146s to extract preloaded images to volume ...
	W0916 10:52:39.731499 2100790 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:52:39.731619 2100790 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:52:39.799291 2100790 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname ha-234759-m03 --name ha-234759-m03 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-234759-m03 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=ha-234759-m03 --network ha-234759 --ip 192.168.49.4 --volume ha-234759-m03:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:52:40.207901 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m03 --format={{.State.Running}}
	I0916 10:52:40.227563 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m03 --format={{.State.Status}}
	I0916 10:52:40.249716 2100790 cli_runner.go:164] Run: docker exec ha-234759-m03 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:52:40.334625 2100790 oci.go:144] the created container "ha-234759-m03" has a running status.
	I0916 10:52:40.334657 2100790 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa...
	I0916 10:52:41.936482 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 10:52:41.936533 2100790 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:52:41.957360 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m03 --format={{.State.Status}}
	I0916 10:52:41.976652 2100790 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:52:41.976677 2100790 kic_runner.go:114] Args: [docker exec --privileged ha-234759-m03 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:52:42.046361 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m03 --format={{.State.Status}}
	I0916 10:52:42.066953 2100790 machine.go:93] provisionDockerMachine start ...
	I0916 10:52:42.067080 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:42.092879 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:42.093191 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40607 <nil> <nil>}
	I0916 10:52:42.093213 2100790 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:52:42.248003 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m03
	
	I0916 10:52:42.248035 2100790 ubuntu.go:169] provisioning hostname "ha-234759-m03"
	I0916 10:52:42.248119 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:42.270492 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:42.270836 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40607 <nil> <nil>}
	I0916 10:52:42.270858 2100790 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-234759-m03 && echo "ha-234759-m03" | sudo tee /etc/hostname
	I0916 10:52:42.440760 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m03
	
	I0916 10:52:42.440849 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:42.459977 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:42.460342 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40607 <nil> <nil>}
	I0916 10:52:42.460368 2100790 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-234759-m03' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-234759-m03/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-234759-m03' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:52:42.598999 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:52:42.599024 2100790 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:52:42.599041 2100790 ubuntu.go:177] setting up certificates
	I0916 10:52:42.599054 2100790 provision.go:84] configureAuth start
	I0916 10:52:42.599118 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m03
	I0916 10:52:42.621734 2100790 provision.go:143] copyHostCerts
	I0916 10:52:42.621792 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:52:42.621825 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:52:42.621835 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:52:42.621915 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:52:42.621999 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:52:42.622023 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:52:42.622032 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:52:42.622060 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:52:42.622119 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:52:42.622135 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:52:42.622139 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:52:42.622167 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:52:42.622249 2100790 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.ha-234759-m03 san=[127.0.0.1 192.168.49.4 ha-234759-m03 localhost minikube]
	I0916 10:52:43.168638 2100790 provision.go:177] copyRemoteCerts
	I0916 10:52:43.168723 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:52:43.168768 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:43.191605 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40607 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:52:43.292375 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:52:43.292458 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:52:43.320575 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:52:43.320646 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:52:43.352225 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:52:43.352289 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:52:43.383018 2100790 provision.go:87] duration metric: took 783.94891ms to configureAuth
	I0916 10:52:43.383048 2100790 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:52:43.383295 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:52:43.383310 2100790 machine.go:96] duration metric: took 1.316334103s to provisionDockerMachine
	I0916 10:52:43.383316 2100790 client.go:171] duration metric: took 9.742962387s to LocalClient.Create
	I0916 10:52:43.383336 2100790 start.go:167] duration metric: took 9.743028275s to libmachine.API.Create "ha-234759"
	I0916 10:52:43.383347 2100790 start.go:293] postStartSetup for "ha-234759-m03" (driver="docker")
	I0916 10:52:43.383356 2100790 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:52:43.383412 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:52:43.383459 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:43.402806 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40607 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:52:43.501362 2100790 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:52:43.505154 2100790 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:52:43.505192 2100790 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:52:43.505204 2100790 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:52:43.505212 2100790 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:52:43.505223 2100790 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:52:43.505291 2100790 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:52:43.505373 2100790 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:52:43.505385 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:52:43.505491 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:52:43.514787 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:52:43.544031 2100790 start.go:296] duration metric: took 160.669508ms for postStartSetup
	I0916 10:52:43.544507 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m03
	I0916 10:52:43.563659 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:52:43.563988 2100790 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:52:43.564040 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:43.589880 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40607 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:52:43.696567 2100790 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:52:43.703829 2100790 start.go:128] duration metric: took 10.066981633s to createHost
	I0916 10:52:43.703851 2100790 start.go:83] releasing machines lock for "ha-234759-m03", held for 10.067174879s
	I0916 10:52:43.703941 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m03
	I0916 10:52:43.728755 2100790 out.go:177] * Found network options:
	I0916 10:52:43.730277 2100790 out.go:177]   - NO_PROXY=192.168.49.2,192.168.49.3
	W0916 10:52:43.732042 2100790 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:52:43.732070 2100790 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:52:43.732093 2100790 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:52:43.732109 2100790 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:52:43.732178 2100790 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:52:43.732226 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:43.732508 2100790 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:52:43.732560 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:43.755278 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40607 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:52:43.756863 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40607 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:52:43.985074 2100790 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:52:44.025512 2100790 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:52:44.025599 2100790 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:52:44.059947 2100790 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:52:44.059975 2100790 start.go:495] detecting cgroup driver to use...
	I0916 10:52:44.060012 2100790 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:52:44.060066 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:52:44.073405 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:52:44.085704 2100790 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:52:44.085771 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:52:44.100755 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:52:44.122230 2100790 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:52:44.217546 2100790 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:52:44.314829 2100790 docker.go:233] disabling docker service ...
	I0916 10:52:44.314906 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:52:44.340640 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:52:44.354834 2100790 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:52:44.449190 2100790 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:52:44.570654 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:52:44.587618 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:52:44.609284 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:52:44.622641 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:52:44.635130 2100790 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:52:44.635207 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:52:44.647258 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:52:44.658735 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:52:44.669861 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:52:44.681123 2100790 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:52:44.691962 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:52:44.702270 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:52:44.713190 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:52:44.724009 2100790 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:52:44.732828 2100790 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:52:44.741682 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:44.828595 2100790 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:52:44.971335 2100790 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:52:44.971419 2100790 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:52:44.977263 2100790 start.go:563] Will wait 60s for crictl version
	I0916 10:52:44.977335 2100790 ssh_runner.go:195] Run: which crictl
	I0916 10:52:44.981436 2100790 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:52:45.071091 2100790 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:52:45.071255 2100790 ssh_runner.go:195] Run: containerd --version
	I0916 10:52:45.118340 2100790 ssh_runner.go:195] Run: containerd --version
	I0916 10:52:45.204671 2100790 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:52:45.206516 2100790 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:52:45.208432 2100790 out.go:177]   - env NO_PROXY=192.168.49.2,192.168.49.3
	I0916 10:52:45.210455 2100790 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:52:45.233293 2100790 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:52:45.238776 2100790 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:52:45.256824 2100790 mustload.go:65] Loading cluster: ha-234759
	I0916 10:52:45.257099 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:52:45.257390 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:52:45.280225 2100790 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:52:45.280623 2100790 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759 for IP: 192.168.49.4
	I0916 10:52:45.280642 2100790 certs.go:194] generating shared ca certs ...
	I0916 10:52:45.280658 2100790 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:45.280866 2100790 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:52:45.280923 2100790 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:52:45.280933 2100790 certs.go:256] generating profile certs ...
	I0916 10:52:45.281028 2100790 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key
	I0916 10:52:45.281065 2100790 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.20d6ef76
	I0916 10:52:45.281084 2100790 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.20d6ef76 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.3 192.168.49.4 192.168.49.254]
	I0916 10:52:45.923599 2100790 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.20d6ef76 ...
	I0916 10:52:45.923631 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.20d6ef76: {Name:mk3222a7a83ee320a551ff049bf10a6fbc2613f9 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:45.923840 2100790 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.20d6ef76 ...
	I0916 10:52:45.923855 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.20d6ef76: {Name:mk0605f9dac8410eefcf89ab8e4a9d0e499298f0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:45.923954 2100790 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.20d6ef76 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt
	I0916 10:52:45.924110 2100790 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.20d6ef76 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key
	I0916 10:52:45.924247 2100790 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key
	I0916 10:52:45.924268 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:52:45.924284 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:52:45.924299 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:52:45.924312 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:52:45.924328 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:52:45.924342 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:52:45.924352 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:52:45.924367 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:52:45.924420 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:52:45.924453 2100790 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:52:45.924463 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:52:45.924492 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:52:45.924538 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:52:45.924560 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:52:45.924605 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:52:45.924638 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:45.924653 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:52:45.924669 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:52:45.924738 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:52:45.943668 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:52:46.039025 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 10:52:46.042740 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 10:52:46.055007 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 10:52:46.060129 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1679 bytes)
	I0916 10:52:46.075570 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 10:52:46.078959 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 10:52:46.091670 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 10:52:46.095768 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1679 bytes)
	I0916 10:52:46.108355 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 10:52:46.111625 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 10:52:46.124092 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 10:52:46.127582 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1675 bytes)
	I0916 10:52:46.139778 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:52:46.165776 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:52:46.190671 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:52:46.215910 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:52:46.241695 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1444 bytes)
	I0916 10:52:46.269182 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:52:46.294302 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:52:46.320006 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 10:52:46.352325 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:52:46.379052 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:52:46.408296 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:52:46.435056 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 10:52:46.453397 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1679 bytes)
	I0916 10:52:46.472282 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 10:52:46.496774 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1679 bytes)
	I0916 10:52:46.517017 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 10:52:46.537412 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1675 bytes)
	I0916 10:52:46.563962 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0916 10:52:46.593089 2100790 ssh_runner.go:195] Run: openssl version
	I0916 10:52:46.600293 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:52:46.610760 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:46.618863 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:46.618985 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:46.631345 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:52:46.644183 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:52:46.654488 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:52:46.658504 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:52:46.658577 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:52:46.665792 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:52:46.677776 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:52:46.692688 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:52:46.696966 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:52:46.697043 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:52:46.705113 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:52:46.714753 2100790 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:52:46.718120 2100790 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:52:46.718173 2100790 kubeadm.go:934] updating node {m03 192.168.49.4 8443 v1.31.1 containerd true true} ...
	I0916 10:52:46.718262 2100790 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-234759-m03 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.4
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:52:46.718305 2100790 kube-vip.go:115] generating kube-vip config ...
	I0916 10:52:46.718357 2100790 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:52:46.731814 2100790 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:52:46.731903 2100790 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
	I0916 10:52:46.731987 2100790 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:52:46.741559 2100790 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:52:46.741630 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 10:52:46.751422 2100790 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:52:46.770950 2100790 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:52:46.793954 2100790 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:52:46.818114 2100790 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:52:46.822385 2100790 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:52:46.835374 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:46.936591 2100790 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:52:46.953510 2100790 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:52:46.953846 2100790 start.go:317] joinCluster: &{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerN
ames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:f
alse kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClien
tPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:52:46.954013 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0916 10:52:46.954121 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:52:46.975492 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:52:47.152327 2100790 start.go:343] trying to join control-plane node "m03" to cluster: &{Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:52:47.152388 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token 6gxztb.uf8jk7w93bf819wg --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=ha-234759-m03 --control-plane --apiserver-advertise-address=192.168.49.4 --apiserver-bind-port=8443"
	I0916 10:52:58.761983 2100790 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token 6gxztb.uf8jk7w93bf819wg --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=ha-234759-m03 --control-plane --apiserver-advertise-address=192.168.49.4 --apiserver-bind-port=8443": (11.609570413s)
	I0916 10:52:58.762012 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0916 10:52:59.293778 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-234759-m03 minikube.k8s.io/updated_at=2024_09_16T10_52_59_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=ha-234759 minikube.k8s.io/primary=false
	I0916 10:52:59.507711 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig taint nodes ha-234759-m03 node-role.kubernetes.io/control-plane:NoSchedule-
	I0916 10:52:59.744320 2100790 start.go:319] duration metric: took 12.790468782s to joinCluster
	I0916 10:52:59.744379 2100790 start.go:235] Will wait 6m0s for node &{Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:52:59.744786 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:52:59.747586 2100790 out.go:177] * Verifying Kubernetes components...
	I0916 10:52:59.750545 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:59.987246 2100790 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:53:00.001755 2100790 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:53:00.002039 2100790 kapi.go:59] client config for ha-234759: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]strin
g(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:53:00.002115 2100790 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:53:00.002350 2100790 node_ready.go:35] waiting up to 6m0s for node "ha-234759-m03" to be "Ready" ...
	I0916 10:53:00.002445 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:00.002451 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.002460 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.002465 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.006214 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:00.007796 2100790 node_ready.go:49] node "ha-234759-m03" has status "Ready":"True"
	I0916 10:53:00.007824 2100790 node_ready.go:38] duration metric: took 5.441976ms for node "ha-234759-m03" to be "Ready" ...
	I0916 10:53:00.007835 2100790 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:53:00.007918 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:00.007925 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.007935 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.007938 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.017585 2100790 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:53:00.030856 2100790 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.031094 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:53:00.031138 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.031169 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.031189 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.035615 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:00.037228 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:00.037251 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.037262 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.037266 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.043707 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:53:00.044954 2100790 pod_ready.go:93] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:00.044979 2100790 pod_ready.go:82] duration metric: took 14.025438ms for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.044993 2100790 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.045075 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vqj8q
	I0916 10:53:00.045080 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.045089 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.045093 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.049854 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:00.051531 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:00.051612 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.051636 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.051654 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.055149 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:00.056419 2100790 pod_ready.go:93] pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:00.056503 2100790 pod_ready.go:82] duration metric: took 11.50067ms for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.056536 2100790 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.056669 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759
	I0916 10:53:00.056696 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.056733 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.056755 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.063099 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:53:00.064699 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:00.064794 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.064817 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.064837 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.069742 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:00.071220 2100790 pod_ready.go:93] pod "etcd-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:00.071309 2100790 pod_ready.go:82] duration metric: took 14.733413ms for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.071337 2100790 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.071483 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:53:00.071516 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.071552 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.071577 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.076656 2100790 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:53:00.078231 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:00.078320 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.078351 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.078370 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.083830 2100790 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:53:00.085105 2100790 pod_ready.go:93] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:00.085188 2100790 pod_ready.go:82] duration metric: took 13.826145ms for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.085223 2100790 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.202539 2100790 request.go:632] Waited for 117.151865ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:00.202703 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:00.202733 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.202756 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.202790 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.214865 2100790 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 10:53:00.414279 2100790 request.go:632] Waited for 198.21169ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:00.414357 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:00.414364 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.414374 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.414379 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.425661 2100790 round_trippers.go:574] Response Status: 200 OK in 11 milliseconds
	I0916 10:53:00.603392 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:00.603419 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.603430 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.603435 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.606262 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:00.802739 2100790 request.go:632] Waited for 195.263039ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:00.802808 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:00.802817 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.802826 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.802835 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.805423 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:01.085837 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:01.085859 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:01.085871 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:01.085877 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:01.096552 2100790 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:53:01.202801 2100790 request.go:632] Waited for 105.299779ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:01.202869 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:01.202878 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:01.202891 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:01.202901 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:01.210211 2100790 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:53:01.586476 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:01.586502 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:01.586512 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:01.586516 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:01.589794 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:01.602999 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:01.603032 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:01.603044 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:01.603048 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:01.606162 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.086384 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:02.086410 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:02.086421 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.086426 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.089730 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.090543 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:02.090564 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:02.090574 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.090578 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.093286 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.094011 2100790 pod_ready.go:103] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:53:02.586374 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:02.586398 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:02.586407 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.586428 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.591075 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:02.592523 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:02.592553 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:02.592571 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.592582 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.595636 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:03.086220 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:03.086252 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:03.086263 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.086269 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.089356 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:03.090129 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:03.090151 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:03.090161 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.090168 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.092945 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:03.585684 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:03.585705 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:03.585715 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.585718 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.588695 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:03.589311 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:03.589331 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:03.589341 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.589345 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.592384 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:04.085500 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:04.085524 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:04.085535 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.085539 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.089160 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:04.089862 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:04.089882 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:04.089893 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.089899 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.092731 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:04.586190 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:04.586210 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:04.586220 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.586225 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.589572 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:04.590394 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:04.590409 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:04.590417 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.590421 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.593236 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:04.593773 2100790 pod_ready.go:103] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:53:05.085487 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:05.085516 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:05.085527 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.085531 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.088925 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:05.089832 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:05.089853 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:05.089863 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.089869 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.092833 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:05.586027 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:05.586050 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:05.586059 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.586063 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.590347 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:05.591941 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:05.591969 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:05.591978 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.591985 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.597384 2100790 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:53:06.086344 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:06.086369 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:06.086380 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:06.086385 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:06.089638 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:06.090417 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:06.090435 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:06.090446 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:06.090451 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:06.093217 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:06.585504 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:06.585526 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:06.585536 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:06.585540 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:06.588458 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:06.589734 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:06.589753 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:06.589763 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:06.589782 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:06.592575 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:07.085516 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:07.085539 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:07.085549 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:07.085554 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:07.088961 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:07.089950 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:07.089971 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:07.089981 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:07.089987 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:07.092680 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:07.093401 2100790 pod_ready.go:103] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:53:07.585556 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:07.585581 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:07.585591 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:07.585596 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:07.589086 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:07.590010 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:07.590031 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:07.590041 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:07.590046 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:07.592854 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:08.085522 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:08.085545 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:08.085558 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:08.085563 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:08.088913 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:08.089798 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:08.089819 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:08.089829 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:08.089833 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:08.092773 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:08.585479 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:08.585502 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:08.585512 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:08.585517 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:08.588565 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:08.589189 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:08.589198 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:08.589207 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:08.589211 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:08.591841 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:09.085999 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:09.086020 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:09.086029 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:09.086034 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:09.089276 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:09.089982 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:09.089994 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:09.090003 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:09.090011 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:09.093611 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:09.094291 2100790 pod_ready.go:103] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:53:09.585630 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:09.585651 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:09.585661 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:09.585667 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:09.588738 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:09.589386 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:09.589397 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:09.589407 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:09.589412 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:09.592087 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:10.085952 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:10.085975 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:10.085985 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:10.085990 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:10.090002 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:10.091096 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:10.091125 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:10.091135 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:10.091142 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:10.094853 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:10.586045 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:10.586074 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:10.586082 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:10.586086 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:10.588861 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:10.589484 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:10.589493 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:10.589503 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:10.589508 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:10.591887 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:11.085588 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:11.085613 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:11.085623 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:11.085628 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:11.090142 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:11.092123 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:11.092144 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:11.092161 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:11.092167 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:11.107741 2100790 round_trippers.go:574] Response Status: 200 OK in 15 milliseconds
	I0916 10:53:11.108712 2100790 pod_ready.go:103] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:53:11.585937 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:11.585970 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:11.585981 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:11.585985 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:11.588955 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:11.589579 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:11.589589 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:11.589598 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:11.589602 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:11.592293 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:12.086017 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:12.086044 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:12.086055 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:12.086061 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:12.089340 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:12.090125 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:12.090144 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:12.090155 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:12.090159 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:12.093762 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:12.585520 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:12.585544 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:12.585554 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:12.585558 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:12.588569 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:12.589630 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:12.589649 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:12.589659 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:12.589666 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:12.592720 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:13.085472 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:13.085493 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:13.085503 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:13.085507 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:13.088576 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:13.089559 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:13.089574 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:13.089587 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:13.089592 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:13.092798 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:13.586411 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:13.586433 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:13.586443 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:13.586447 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:13.589409 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:13.590052 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:13.590076 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:13.590087 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:13.590092 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:13.593120 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:13.593616 2100790 pod_ready.go:103] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:53:14.085905 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:14.085929 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:14.085939 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:14.085945 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:14.090810 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:14.091934 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:14.091954 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:14.091964 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:14.091970 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:14.094918 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:14.586029 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:14.586050 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:14.586060 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:14.586072 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:14.589336 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:14.590151 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:14.590169 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:14.590179 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:14.590183 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:14.593127 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:15.086009 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:15.086033 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.086044 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.086048 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.092809 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:53:15.093797 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:15.093827 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.093838 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.093848 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.116532 2100790 round_trippers.go:574] Response Status: 200 OK in 22 milliseconds
	I0916 10:53:15.117038 2100790 pod_ready.go:93] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.117064 2100790 pod_ready.go:82] duration metric: took 15.031795779s for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.117097 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.117172 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759
	I0916 10:53:15.117181 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.117189 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.117204 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.120370 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.121805 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:15.121829 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.121840 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.121846 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.125356 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.126239 2100790 pod_ready.go:93] pod "kube-apiserver-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.126264 2100790 pod_ready.go:82] duration metric: took 9.154626ms for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.126277 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.126345 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:53:15.126355 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.126363 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.126369 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.130022 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.130930 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:15.130990 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.131015 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.131035 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.134272 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.134952 2100790 pod_ready.go:93] pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.134979 2100790 pod_ready.go:82] duration metric: took 8.694871ms for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.134992 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.135070 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m03
	I0916 10:53:15.135080 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.135097 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.135106 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.138404 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.139433 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:15.139454 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.139464 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.139468 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.142665 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.143525 2100790 pod_ready.go:93] pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.143547 2100790 pod_ready.go:82] duration metric: took 8.548303ms for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.143558 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.143623 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:53:15.143634 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.143642 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.143647 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.146853 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.147936 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:15.147998 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.148021 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.148041 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.151123 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.151930 2100790 pod_ready.go:93] pod "kube-controller-manager-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.152038 2100790 pod_ready.go:82] duration metric: took 8.470979ms for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.152065 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.286488 2100790 request.go:632] Waited for 134.323311ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:53:15.286564 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:53:15.286574 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.286583 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.286592 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.289913 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.486167 2100790 request.go:632] Waited for 195.243315ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:15.486320 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:15.486347 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.486370 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.486388 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.489669 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.490324 2100790 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.490346 2100790 pod_ready.go:82] duration metric: took 338.258699ms for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.490359 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.686344 2100790 request.go:632] Waited for 195.902732ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:53:15.686427 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:53:15.686440 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.686450 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.686455 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.689472 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:15.886562 2100790 request.go:632] Waited for 196.35073ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:15.886651 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:15.886661 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.886671 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.886747 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.889787 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.890339 2100790 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.890358 2100790 pod_ready.go:82] duration metric: took 399.991383ms for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.890370 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:16.086966 2100790 request.go:632] Waited for 196.474939ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:53:16.087068 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:53:16.087079 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:16.087087 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:16.087093 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:16.090186 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:16.286669 2100790 request.go:632] Waited for 195.729737ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:16.286761 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:16.286767 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:16.286777 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:16.286785 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:16.289535 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:16.290048 2100790 pod_ready.go:93] pod "kube-proxy-f4jm2" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:16.290074 2100790 pod_ready.go:82] duration metric: took 399.696985ms for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:16.290098 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:16.486589 2100790 request.go:632] Waited for 196.411858ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:53:16.486668 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:53:16.486753 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:16.486774 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:16.486779 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:16.489854 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:16.686773 2100790 request.go:632] Waited for 196.227408ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:16.686828 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:16.686834 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:16.686844 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:16.686850 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:16.690031 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:16.690559 2100790 pod_ready.go:93] pod "kube-proxy-gwdl4" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:16.690579 2100790 pod_ready.go:82] duration metric: took 400.468172ms for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:16.690590 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:16.886347 2100790 request.go:632] Waited for 195.64899ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:53:16.886437 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:53:16.886449 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:16.886458 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:16.886465 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:16.889625 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:17.086611 2100790 request.go:632] Waited for 196.218998ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:17.086746 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:17.086760 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:17.086770 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:17.086775 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:17.089919 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:17.090750 2100790 pod_ready.go:93] pod "kube-proxy-qrdxc" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:17.090774 2100790 pod_ready.go:82] duration metric: took 400.17672ms for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:17.090786 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:17.286333 2100790 request.go:632] Waited for 195.472589ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:53:17.286426 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:53:17.286438 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:17.286449 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:17.286454 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:17.289368 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:17.486351 2100790 request.go:632] Waited for 196.330841ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:17.486414 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:17.486425 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:17.486434 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:17.486440 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:17.490240 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:17.491312 2100790 pod_ready.go:93] pod "kube-scheduler-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:17.491376 2100790 pod_ready.go:82] duration metric: took 400.581132ms for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:17.491403 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:17.686830 2100790 request.go:632] Waited for 195.345518ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:53:17.686978 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:53:17.687004 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:17.687028 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:17.687045 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:17.690176 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:17.886517 2100790 request.go:632] Waited for 195.373644ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:17.886579 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:17.886593 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:17.886601 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:17.886605 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:17.889635 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:17.890186 2100790 pod_ready.go:93] pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:17.890206 2100790 pod_ready.go:82] duration metric: took 398.784704ms for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:17.890219 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:18.086990 2100790 request.go:632] Waited for 196.692874ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:53:18.087049 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:53:18.087056 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:18.087065 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:18.087075 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:18.090267 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:18.286299 2100790 request.go:632] Waited for 195.282207ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:18.286378 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:18.286406 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:18.286422 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:18.286427 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:18.289318 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:18.289844 2100790 pod_ready.go:93] pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:18.289861 2100790 pod_ready.go:82] duration metric: took 399.626602ms for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:18.289873 2100790 pod_ready.go:39] duration metric: took 18.282026231s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:53:18.289893 2100790 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:53:18.289958 2100790 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:53:18.302372 2100790 api_server.go:72] duration metric: took 18.557963761s to wait for apiserver process to appear ...
	I0916 10:53:18.302451 2100790 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:53:18.302482 2100790 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:53:18.310422 2100790 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:53:18.310497 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 10:53:18.310508 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:18.310528 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:18.310533 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:18.311599 2100790 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:53:18.311656 2100790 api_server.go:141] control plane version: v1.31.1
	I0916 10:53:18.311674 2100790 api_server.go:131] duration metric: took 9.20923ms to wait for apiserver health ...
	I0916 10:53:18.311682 2100790 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:53:18.487060 2100790 request.go:632] Waited for 175.31082ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:18.487169 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:18.487184 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:18.487194 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:18.487198 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:18.493988 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:53:18.503268 2100790 system_pods.go:59] 24 kube-system pods found
	I0916 10:53:18.503307 2100790 system_pods.go:61] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:53:18.503315 2100790 system_pods.go:61] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:53:18.503320 2100790 system_pods.go:61] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:53:18.503333 2100790 system_pods.go:61] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:53:18.503342 2100790 system_pods.go:61] "etcd-ha-234759-m03" [701fa3e0-9014-4f92-9734-6b708cb56aa0] Running
	I0916 10:53:18.503346 2100790 system_pods.go:61] "kindnet-jhkc5" [59ea0a34-9a2b-44f2-901f-2bcc13b91a1b] Running
	I0916 10:53:18.503353 2100790 system_pods.go:61] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:53:18.503357 2100790 system_pods.go:61] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running
	I0916 10:53:18.503364 2100790 system_pods.go:61] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:53:18.503368 2100790 system_pods.go:61] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:53:18.503371 2100790 system_pods.go:61] "kube-apiserver-ha-234759-m03" [136cdbc8-ac04-4ca9-85a2-17ff91df20c0] Running
	I0916 10:53:18.503376 2100790 system_pods.go:61] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:53:18.503383 2100790 system_pods.go:61] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running
	I0916 10:53:18.503387 2100790 system_pods.go:61] "kube-controller-manager-ha-234759-m03" [61a9f46c-7889-49f1-afe7-1bbce98f0b5a] Running
	I0916 10:53:18.503391 2100790 system_pods.go:61] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running
	I0916 10:53:18.503402 2100790 system_pods.go:61] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:53:18.503406 2100790 system_pods.go:61] "kube-proxy-qrdxc" [d08c1deb-9b33-4ec1-b15e-6dce465c00d9] Running
	I0916 10:53:18.503409 2100790 system_pods.go:61] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:53:18.503413 2100790 system_pods.go:61] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:53:18.503417 2100790 system_pods.go:61] "kube-scheduler-ha-234759-m03" [deab7491-330f-496f-985f-3b1882eeeb60] Running
	I0916 10:53:18.503423 2100790 system_pods.go:61] "kube-vip-ha-234759" [41a1ec5f-e3ae-4b06-9a3f-f76b54f7d24e] Running
	I0916 10:53:18.503428 2100790 system_pods.go:61] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:53:18.503433 2100790 system_pods.go:61] "kube-vip-ha-234759-m03" [44f3fbf3-8803-4d2f-884c-470cdc81e835] Running
	I0916 10:53:18.503437 2100790 system_pods.go:61] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:53:18.503446 2100790 system_pods.go:74] duration metric: took 191.758004ms to wait for pod list to return data ...
	I0916 10:53:18.503457 2100790 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:53:18.686875 2100790 request.go:632] Waited for 183.327132ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:53:18.686934 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:53:18.686940 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:18.686948 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:18.686955 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:18.690563 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:18.690979 2100790 default_sa.go:45] found service account: "default"
	I0916 10:53:18.691005 2100790 default_sa.go:55] duration metric: took 187.537042ms for default service account to be created ...
	I0916 10:53:18.691015 2100790 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:53:18.886349 2100790 request.go:632] Waited for 195.266535ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:18.886417 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:18.886424 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:18.886438 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:18.886447 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:18.892680 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:53:18.901986 2100790 system_pods.go:86] 24 kube-system pods found
	I0916 10:53:18.903207 2100790 system_pods.go:89] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:53:18.903237 2100790 system_pods.go:89] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:53:18.903268 2100790 system_pods.go:89] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:53:18.903292 2100790 system_pods.go:89] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:53:18.903313 2100790 system_pods.go:89] "etcd-ha-234759-m03" [701fa3e0-9014-4f92-9734-6b708cb56aa0] Running
	I0916 10:53:18.903336 2100790 system_pods.go:89] "kindnet-jhkc5" [59ea0a34-9a2b-44f2-901f-2bcc13b91a1b] Running
	I0916 10:53:18.903354 2100790 system_pods.go:89] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:53:18.903389 2100790 system_pods.go:89] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running
	I0916 10:53:18.903408 2100790 system_pods.go:89] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:53:18.903428 2100790 system_pods.go:89] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:53:18.903446 2100790 system_pods.go:89] "kube-apiserver-ha-234759-m03" [136cdbc8-ac04-4ca9-85a2-17ff91df20c0] Running
	I0916 10:53:18.903474 2100790 system_pods.go:89] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:53:18.903498 2100790 system_pods.go:89] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running
	I0916 10:53:18.903802 2100790 system_pods.go:89] "kube-controller-manager-ha-234759-m03" [61a9f46c-7889-49f1-afe7-1bbce98f0b5a] Running
	I0916 10:53:18.903839 2100790 system_pods.go:89] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running
	I0916 10:53:18.908336 2100790 system_pods.go:89] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:53:18.909417 2100790 system_pods.go:89] "kube-proxy-qrdxc" [d08c1deb-9b33-4ec1-b15e-6dce465c00d9] Running
	I0916 10:53:18.909440 2100790 system_pods.go:89] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:53:18.909459 2100790 system_pods.go:89] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:53:18.909491 2100790 system_pods.go:89] "kube-scheduler-ha-234759-m03" [deab7491-330f-496f-985f-3b1882eeeb60] Running
	I0916 10:53:18.909515 2100790 system_pods.go:89] "kube-vip-ha-234759" [41a1ec5f-e3ae-4b06-9a3f-f76b54f7d24e] Running
	I0916 10:53:18.909534 2100790 system_pods.go:89] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:53:18.909549 2100790 system_pods.go:89] "kube-vip-ha-234759-m03" [44f3fbf3-8803-4d2f-884c-470cdc81e835] Running
	I0916 10:53:18.909567 2100790 system_pods.go:89] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:53:18.909599 2100790 system_pods.go:126] duration metric: took 218.575231ms to wait for k8s-apps to be running ...
	I0916 10:53:18.909628 2100790 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:53:18.909725 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:53:18.924027 2100790 system_svc.go:56] duration metric: took 14.389539ms WaitForService to wait for kubelet
	I0916 10:53:18.924100 2100790 kubeadm.go:582] duration metric: took 19.179694507s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:53:18.924151 2100790 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:53:19.086539 2100790 request.go:632] Waited for 162.294484ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:53:19.086598 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:53:19.086603 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:19.086612 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:19.086621 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:19.090275 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:19.091484 2100790 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:53:19.091512 2100790 node_conditions.go:123] node cpu capacity is 2
	I0916 10:53:19.091523 2100790 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:53:19.091529 2100790 node_conditions.go:123] node cpu capacity is 2
	I0916 10:53:19.091533 2100790 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:53:19.091538 2100790 node_conditions.go:123] node cpu capacity is 2
	I0916 10:53:19.091543 2100790 node_conditions.go:105] duration metric: took 167.371514ms to run NodePressure ...
	I0916 10:53:19.091559 2100790 start.go:241] waiting for startup goroutines ...
	I0916 10:53:19.091586 2100790 start.go:255] writing updated cluster config ...
	I0916 10:53:19.091920 2100790 ssh_runner.go:195] Run: rm -f paused
	I0916 10:53:19.101498 2100790 out.go:177] * Done! kubectl is now configured to use "ha-234759" cluster and "default" namespace by default
	E0916 10:53:19.103908 2100790 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	7e2e09055b617       89a35e2ebb6b9       58 seconds ago      Running             busybox                   0                   59f93d3cf6ebc       busybox-7dff88458-kjr9x
	0e47bf675d7df       2f6c962e7b831       2 minutes ago       Running             coredns                   0                   297acf9bc71e4       coredns-7c65d6cfc9-2l4br
	e629c24c41e32       2f6c962e7b831       2 minutes ago       Running             coredns                   0                   88f003522915c       coredns-7c65d6cfc9-vqj8q
	2586d6167e755       ba04bb24b9575       2 minutes ago       Running             storage-provisioner       0                   97c9faf1ef6b9       storage-provisioner
	7d51a8f7f42ff       6a23fa8fd2b78       2 minutes ago       Running             kindnet-cni               0                   ae1a0829d833f       kindnet-q8nl6
	900d2ad5148fe       24a140c548c07       2 minutes ago       Running             kube-proxy                0                   4e5ecfb50c3cd       kube-proxy-gwdl4
	33346b72c3ec8       7e2a4e229620b       2 minutes ago       Running             kube-vip                  0                   0a388c673a7b6       kube-vip-ha-234759
	324a547043689       27e3830e14027       2 minutes ago       Running             etcd                      0                   fa0cb25bfd24a       etcd-ha-234759
	5a7d53b11a05f       7f8aa378bb47d       2 minutes ago       Running             kube-scheduler            0                   56f2eb27f3396       kube-scheduler-ha-234759
	a7002833ce71b       279f381cb3736       2 minutes ago       Running             kube-controller-manager   0                   d4248c2bf66dc       kube-controller-manager-ha-234759
	fd48034050bae       d3f53a98c0a9d       2 minutes ago       Running             kube-apiserver            0                   2d1650dd1ced5       kube-apiserver-ha-234759
	
	
	==> containerd <==
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.455045464Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.455058214Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.455142702Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.600534643Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-7c65d6cfc9-2l4br,Uid:18d893a2-274a-413e-bf3d-0dd1e88a9984,Namespace:kube-system,Attempt:0,} returns sandbox id \"297acf9bc71e47516551a5df82eefb57a4c11f5167fc46f762281843c0061c49\""
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.608009960Z" level=info msg="CreateContainer within sandbox \"297acf9bc71e47516551a5df82eefb57a4c11f5167fc46f762281843c0061c49\" for container &ContainerMetadata{Name:coredns,Attempt:0,}"
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.632002179Z" level=info msg="CreateContainer within sandbox \"297acf9bc71e47516551a5df82eefb57a4c11f5167fc46f762281843c0061c49\" for &ContainerMetadata{Name:coredns,Attempt:0,} returns container id \"0e47bf675d7dfaa397cb12c3a975c0fdae535cdf7989381139d757ac5f8a5eaf\""
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.635432796Z" level=info msg="StartContainer for \"0e47bf675d7dfaa397cb12c3a975c0fdae535cdf7989381139d757ac5f8a5eaf\""
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.766728481Z" level=info msg="StartContainer for \"0e47bf675d7dfaa397cb12c3a975c0fdae535cdf7989381139d757ac5f8a5eaf\" returns successfully"
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.025128980Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:busybox-7dff88458-kjr9x,Uid:18a7f530-b34d-413a-9028-13511f5b9be6,Namespace:default,Attempt:0,}"
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.172814800Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.172963115Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.173004165Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.173141527Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.312044969Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:busybox-7dff88458-kjr9x,Uid:18a7f530-b34d-413a-9028-13511f5b9be6,Namespace:default,Attempt:0,} returns sandbox id \"59f93d3cf6ebc1a2852f89874b510bf9172a820a16c52d9b9ad1b24fcdfc4bea\""
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.317796925Z" level=info msg="PullImage \"gcr.io/k8s-minikube/busybox:1.28\""
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.213571160Z" level=info msg="ImageCreate event name:\"gcr.io/k8s-minikube/busybox:1.28\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.216616589Z" level=info msg="stop pulling image gcr.io/k8s-minikube/busybox:1.28: active requests=0, bytes read=766310"
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.222099534Z" level=info msg="ImageCreate event name:\"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.226269862Z" level=info msg="ImageCreate event name:\"gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.227094678Z" level=info msg="Pulled image \"gcr.io/k8s-minikube/busybox:1.28\" with image id \"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\", repo tag \"gcr.io/k8s-minikube/busybox:1.28\", repo digest \"gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12\", size \"764554\" in 1.909236313s"
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.227135785Z" level=info msg="PullImage \"gcr.io/k8s-minikube/busybox:1.28\" returns image reference \"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\""
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.233718554Z" level=info msg="CreateContainer within sandbox \"59f93d3cf6ebc1a2852f89874b510bf9172a820a16c52d9b9ad1b24fcdfc4bea\" for container &ContainerMetadata{Name:busybox,Attempt:0,}"
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.255235031Z" level=info msg="CreateContainer within sandbox \"59f93d3cf6ebc1a2852f89874b510bf9172a820a16c52d9b9ad1b24fcdfc4bea\" for &ContainerMetadata{Name:busybox,Attempt:0,} returns container id \"7e2e09055b61790f45d02e3260c05daee3be55c8b2fc6527f43462f3b1cc91f3\""
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.256459335Z" level=info msg="StartContainer for \"7e2e09055b61790f45d02e3260c05daee3be55c8b2fc6527f43462f3b1cc91f3\""
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.317952234Z" level=info msg="StartContainer for \"7e2e09055b61790f45d02e3260c05daee3be55c8b2fc6527f43462f3b1cc91f3\" returns successfully"
	
	
	==> coredns [0e47bf675d7dfaa397cb12c3a975c0fdae535cdf7989381139d757ac5f8a5eaf] <==
	[INFO] 10.244.2.2:52667 - 3 "AAAA IN kubernetes.io. udp 31 false 512" NOERROR qr,rd,ra 31 0.001304304s
	[INFO] 10.244.1.2:37083 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000164668s
	[INFO] 10.244.1.2:49488 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.00026353s
	[INFO] 10.244.1.2:56285 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.002384855s
	[INFO] 10.244.1.2:35002 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000170698s
	[INFO] 10.244.1.2:50858 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000131306s
	[INFO] 10.244.0.4:38621 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000076611s
	[INFO] 10.244.0.4:36661 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001075152s
	[INFO] 10.244.0.4:53651 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000141973s
	[INFO] 10.244.2.2:45377 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000162452s
	[INFO] 10.244.2.2:43234 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000100759s
	[INFO] 10.244.1.2:43502 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000106273s
	[INFO] 10.244.0.4:55514 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000173914s
	[INFO] 10.244.0.4:55773 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000884467s
	[INFO] 10.244.0.4:41665 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000052447s
	[INFO] 10.244.2.2:41797 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000094014s
	[INFO] 10.244.2.2:36525 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000069365s
	[INFO] 10.244.2.2:43068 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000061341s
	[INFO] 10.244.1.2:60478 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000124816s
	[INFO] 10.244.1.2:59811 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000107117s
	[INFO] 10.244.0.4:38611 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000104008s
	[INFO] 10.244.0.4:58312 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000183285s
	[INFO] 10.244.0.4:37216 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000090026s
	[INFO] 10.244.2.2:35594 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000105205s
	[INFO] 10.244.2.2:35249 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000187798s
	
	
	==> coredns [e629c24c41e32603dc9a53125aa7122a5a0c58d985e95165cffe89d5670988c4] <==
	[INFO] 10.244.2.2:55324 - 5 "PTR IN 148.40.75.147.in-addr.arpa. udp 44 false 512" NXDOMAIN qr,rd,ra 44 0.001285121s
	[INFO] 10.244.1.2:48295 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.002432526s
	[INFO] 10.244.1.2:55741 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000227814s
	[INFO] 10.244.1.2:36649 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000202985s
	[INFO] 10.244.0.4:56384 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000102695s
	[INFO] 10.244.0.4:40529 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.00123465s
	[INFO] 10.244.0.4:54004 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000137197s
	[INFO] 10.244.0.4:51298 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000069636s
	[INFO] 10.244.0.4:46099 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000151031s
	[INFO] 10.244.2.2:44482 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001418231s
	[INFO] 10.244.2.2:41395 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000067806s
	[INFO] 10.244.2.2:34678 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000130428s
	[INFO] 10.244.2.2:45582 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001555683s
	[INFO] 10.244.2.2:39632 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000067101s
	[INFO] 10.244.2.2:46573 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.00010962s
	[INFO] 10.244.1.2:45854 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000124578s
	[INFO] 10.244.1.2:52505 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000082141s
	[INFO] 10.244.1.2:54504 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000106543s
	[INFO] 10.244.0.4:43966 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000198588s
	[INFO] 10.244.2.2:57482 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000082715s
	[INFO] 10.244.1.2:42996 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000114255s
	[INFO] 10.244.1.2:54974 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.00016132s
	[INFO] 10.244.0.4:36323 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000157373s
	[INFO] 10.244.2.2:48775 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000152442s
	[INFO] 10.244.2.2:50527 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000112869s
	
	
	==> describe nodes <==
	Name:               ha-234759
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_51_47_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:51:45 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:54:18 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:53:49 +0000   Mon, 16 Sep 2024 10:51:45 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:53:49 +0000   Mon, 16 Sep 2024 10:51:45 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:53:49 +0000   Mon, 16 Sep 2024 10:51:45 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:53:49 +0000   Mon, 16 Sep 2024 10:51:46 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    ha-234759
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 9135db83b7fa4a89a9709e47daa481e7
	  System UUID:                2a58ed5f-69e8-4ab8-a10e-2a95cf1d9dec
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (11 in total)
	  Namespace                   Name                                 CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                 ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-kjr9x              0 (0%)        0 (0%)      0 (0%)           0 (0%)         62s
	  kube-system                 coredns-7c65d6cfc9-2l4br             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     2m31s
	  kube-system                 coredns-7c65d6cfc9-vqj8q             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     2m31s
	  kube-system                 etcd-ha-234759                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         2m36s
	  kube-system                 kindnet-q8nl6                        100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      2m31s
	  kube-system                 kube-apiserver-ha-234759             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m36s
	  kube-system                 kube-controller-manager-ha-234759    200m (10%)    0 (0%)      0 (0%)           0 (0%)         2m36s
	  kube-system                 kube-proxy-gwdl4                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m31s
	  kube-system                 kube-scheduler-ha-234759             100m (5%)     0 (0%)      0 (0%)           0 (0%)         2m36s
	  kube-system                 kube-vip-ha-234759                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m36s
	  kube-system                 storage-provisioner                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m30s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                950m (47%)  100m (5%)
	  memory             290Mi (3%)  390Mi (4%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age    From             Message
	  ----     ------                   ----   ----             -------
	  Normal   Starting                 2m30s  kube-proxy       
	  Normal   Starting                 2m36s  kubelet          Starting kubelet.
	  Warning  CgroupV1                 2m36s  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  2m36s  kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeReady                2m36s  kubelet          Node ha-234759 status is now: NodeReady
	  Normal   NodeHasSufficientMemory  2m36s  kubelet          Node ha-234759 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    2m36s  kubelet          Node ha-234759 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m36s  kubelet          Node ha-234759 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           2m32s  node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           2m1s   node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           77s    node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	
	
	Name:               ha-234759-m02
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_52_13_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:52:10 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:54:12 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:53:41 +0000   Mon, 16 Sep 2024 10:52:10 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:53:41 +0000   Mon, 16 Sep 2024 10:52:10 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:53:41 +0000   Mon, 16 Sep 2024 10:52:10 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:53:41 +0000   Mon, 16 Sep 2024 10:52:11 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.3
	  Hostname:    ha-234759-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 4c059f3dd1fe4e4db5c9a9fbf1993d3b
	  System UUID:                ee72b9d9-548d-49fb-8dc5-aa6839abad7f
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-7l4g7                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         62s
	  kube-system                 etcd-ha-234759-m02                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         2m10s
	  kube-system                 kindnet-svsnq                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      2m12s
	  kube-system                 kube-apiserver-ha-234759-m02             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m11s
	  kube-system                 kube-controller-manager-ha-234759-m02    200m (10%)    0 (0%)      0 (0%)           0 (0%)         2m10s
	  kube-system                 kube-proxy-f4jm2                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m12s
	  kube-system                 kube-scheduler-ha-234759-m02             100m (5%)     0 (0%)      0 (0%)           0 (0%)         2m11s
	  kube-system                 kube-vip-ha-234759-m02                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m7s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (1%)  50Mi (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 2m4s                   kube-proxy       
	  Normal   RegisteredNode           2m12s                  node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   Starting                 2m12s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 2m12s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  2m12s (x8 over 2m12s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    2m12s (x7 over 2m12s)  kubelet          Node ha-234759-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m12s (x7 over 2m12s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  2m12s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           2m1s                   node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   RegisteredNode           77s                    node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	
	
	Name:               ha-234759-m03
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759-m03
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_52_59_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:52:54 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759-m03
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:54:16 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:53:56 +0000   Mon, 16 Sep 2024 10:52:54 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:53:56 +0000   Mon, 16 Sep 2024 10:52:54 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:53:56 +0000   Mon, 16 Sep 2024 10:52:54 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:53:56 +0000   Mon, 16 Sep 2024 10:52:55 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.4
	  Hostname:    ha-234759-m03
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 16dbc83e7476410281e7051b011cace5
	  System UUID:                97682363-4679-4dad-b2b1-8d6fd4a34715
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.2.0/24
	PodCIDRs:                     10.244.2.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-m9lsb                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         62s
	  kube-system                 etcd-ha-234759-m03                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         85s
	  kube-system                 kindnet-jhkc5                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      88s
	  kube-system                 kube-apiserver-ha-234759-m03             250m (12%)    0 (0%)      0 (0%)           0 (0%)         85s
	  kube-system                 kube-controller-manager-ha-234759-m03    200m (10%)    0 (0%)      0 (0%)           0 (0%)         85s
	  kube-system                 kube-proxy-qrdxc                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         88s
	  kube-system                 kube-scheduler-ha-234759-m03             100m (5%)     0 (0%)      0 (0%)           0 (0%)         85s
	  kube-system                 kube-vip-ha-234759-m03                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         81s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (1%)  50Mi (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type    Reason                   Age                From             Message
	  ----    ------                   ----               ----             -------
	  Normal  Starting                 79s                kube-proxy       
	  Normal  NodeHasSufficientMemory  88s (x8 over 88s)  kubelet          Node ha-234759-m03 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    88s (x7 over 88s)  kubelet          Node ha-234759-m03 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     88s (x7 over 88s)  kubelet          Node ha-234759-m03 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  88s                kubelet          Updated Node Allocatable limit across pods
	  Normal  RegisteredNode           87s                node-controller  Node ha-234759-m03 event: Registered Node ha-234759-m03 in Controller
	  Normal  RegisteredNode           86s                node-controller  Node ha-234759-m03 event: Registered Node ha-234759-m03 in Controller
	  Normal  RegisteredNode           77s                node-controller  Node ha-234759-m03 event: Registered Node ha-234759-m03 in Controller
	
	
	Name:               ha-234759-m04
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759-m04
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_54_13_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:54:12 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759-m04
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:54:12 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:54:13 +0000   Mon, 16 Sep 2024 10:54:12 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:54:13 +0000   Mon, 16 Sep 2024 10:54:12 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:54:13 +0000   Mon, 16 Sep 2024 10:54:12 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:54:13 +0000   Mon, 16 Sep 2024 10:54:13 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.5
	  Hostname:    ha-234759-m04
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 a8ed48d03d3c49bab7b6f3dbb66aab2e
	  System UUID:                3f4e61b4-061e-4448-a9f3-3c0401d9b215
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.3.0/24
	PodCIDRs:                     10.244.3.0/24
	Non-terminated Pods:          (2 in total)
	  Namespace                   Name                CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                ------------  ----------  ---------------  -------------  ---
	  kube-system                 kindnet-lwtj4       100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      10s
	  kube-system                 kube-proxy-m84xg    0 (0%)        0 (0%)      0 (0%)           0 (0%)         10s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type    Reason                   Age                From             Message
	  ----    ------                   ----               ----             -------
	  Normal  Starting                 7s                 kube-proxy       
	  Normal  NodeHasSufficientMemory  10s (x2 over 10s)  kubelet          Node ha-234759-m04 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    10s (x2 over 10s)  kubelet          Node ha-234759-m04 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     10s (x2 over 10s)  kubelet          Node ha-234759-m04 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  10s                kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeReady                9s                 kubelet          Node ha-234759-m04 status is now: NodeReady
	  Normal  RegisteredNode           7s                 node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal  RegisteredNode           7s                 node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal  RegisteredNode           6s                 node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [324a5470436890084c7d201c8b4f70a15952b517d95c7e4094491c0aafb39871] <==
	{"level":"info","ts":"2024-09-16T10:52:13.171759Z","caller":"membership/cluster.go:535","msg":"promote member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:52:13.171932Z","caller":"etcdserver/server.go:1996","msg":"applied a configuration change through raft","local-member-id":"aec36adc501070cc","raft-conf-change":"ConfChangeAddNode","raft-conf-change-node-id":"3b59db4913cc3eb9"}
	{"level":"info","ts":"2024-09-16T10:52:55.036858Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc switched to configuration voters=(4276690428076244665 12593026477526642892) learners=(10733080707237678864)"}
	{"level":"info","ts":"2024-09-16T10:52:55.037921Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","added-peer-id":"94f3900974800f10","added-peer-peer-urls":["https://192.168.49.4:2380"]}
	{"level":"info","ts":"2024-09-16T10:52:55.037974Z","caller":"rafthttp/peer.go:133","msg":"starting remote peer","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:52:55.038161Z","caller":"rafthttp/pipeline.go:72","msg":"started HTTP pipelining with remote peer","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:52:55.038436Z","caller":"rafthttp/peer.go:137","msg":"started remote peer","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:52:55.038461Z","caller":"rafthttp/transport.go:317","msg":"added remote peer","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10","remote-peer-urls":["https://192.168.49.4:2380"]}
	{"level":"info","ts":"2024-09-16T10:52:55.038489Z","caller":"etcdserver/server.go:1996","msg":"applied a configuration change through raft","local-member-id":"aec36adc501070cc","raft-conf-change":"ConfChangeAddLearnerNode","raft-conf-change-node-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:52:55.039948Z","caller":"rafthttp/stream.go:169","msg":"started stream writer with remote peer","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:52:55.040228Z","caller":"rafthttp/stream.go:169","msg":"started stream writer with remote peer","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:52:55.040249Z","caller":"rafthttp/stream.go:395","msg":"started stream reader with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:52:55.040284Z","caller":"rafthttp/stream.go:395","msg":"started stream reader with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"warn","ts":"2024-09-16T10:52:56.289148Z","caller":"etcdhttp/peer.go:150","msg":"failed to promote a member","member-id":"94f3900974800f10","error":"etcdserver: can only promote a learner member which is in sync with leader"}
	{"level":"info","ts":"2024-09-16T10:52:56.968157Z","caller":"rafthttp/peer_status.go:53","msg":"peer became active","peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:52:57.029892Z","caller":"rafthttp/stream.go:412","msg":"established TCP streaming connection with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:52:57.048215Z","caller":"rafthttp/stream.go:412","msg":"established TCP streaming connection with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:52:57.063502Z","caller":"rafthttp/stream.go:249","msg":"set message encoder","from":"aec36adc501070cc","to":"94f3900974800f10","stream-type":"stream Message"}
	{"level":"info","ts":"2024-09-16T10:52:57.064153Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:52:57.063962Z","caller":"rafthttp/stream.go:249","msg":"set message encoder","from":"aec36adc501070cc","to":"94f3900974800f10","stream-type":"stream MsgApp v2"}
	{"level":"info","ts":"2024-09-16T10:52:57.064481Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"warn","ts":"2024-09-16T10:52:57.272545Z","caller":"etcdhttp/peer.go:150","msg":"failed to promote a member","member-id":"94f3900974800f10","error":"etcdserver: can only promote a learner member which is in sync with leader"}
	{"level":"info","ts":"2024-09-16T10:52:58.294139Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc switched to configuration voters=(4276690428076244665 10733080707237678864 12593026477526642892)"}
	{"level":"info","ts":"2024-09-16T10:52:58.294311Z","caller":"membership/cluster.go:535","msg":"promote member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:52:58.294386Z","caller":"etcdserver/server.go:1996","msg":"applied a configuration change through raft","local-member-id":"aec36adc501070cc","raft-conf-change":"ConfChangeAddNode","raft-conf-change-node-id":"94f3900974800f10"}
	
	
	==> kernel <==
	 10:54:22 up 1 day, 14:36,  0 users,  load average: 2.74, 2.02, 1.63
	Linux ha-234759 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [7d51a8f7f42ff3becfe558f4f4801bec107af9426327a36d60c9cf3b27276148] <==
	I0916 10:53:42.823483       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:53:42.823525       1 main.go:299] handling current node
	I0916 10:53:42.823570       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:53:42.823617       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:53:52.824210       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:53:52.824259       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:53:52.824385       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:53:52.824393       1 main.go:322] Node ha-234759-m03 has CIDR [10.244.2.0/24] 
	I0916 10:53:52.824429       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:53:52.824436       1 main.go:299] handling current node
	I0916 10:54:02.827388       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:54:02.827421       1 main.go:299] handling current node
	I0916 10:54:02.827436       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:54:02.827442       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:54:02.827557       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:54:02.827564       1 main.go:322] Node ha-234759-m03 has CIDR [10.244.2.0/24] 
	I0916 10:54:12.822861       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:54:12.822941       1 main.go:299] handling current node
	I0916 10:54:12.822964       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:54:12.822978       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:54:12.823164       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:54:12.823183       1 main.go:322] Node ha-234759-m03 has CIDR [10.244.2.0/24] 
	I0916 10:54:12.823238       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:54:12.823244       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:54:12.823320       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.3.0/24 Src: <nil> Gw: 192.168.49.5 Flags: [] Table: 0} 
	
	
	==> kube-apiserver [fd48034050bae874c9e190debc0b1bfa138cdf53fc5887bd15f027e7346ca82d] <==
	I0916 10:51:44.563921       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 10:51:45.367599       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:51:45.427785       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 10:51:45.557714       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 10:51:45.564282       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2]
	I0916 10:51:45.565434       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:51:45.571193       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 10:51:45.591261       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:51:46.453614       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:51:46.467333       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 10:51:46.479997       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:51:51.170135       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0916 10:51:51.345002       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	E0916 10:53:51.206555       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34472: use of closed network connection
	E0916 10:53:51.712393       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34496: use of closed network connection
	E0916 10:53:51.999923       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34514: use of closed network connection
	E0916 10:53:52.251265       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34536: use of closed network connection
	E0916 10:53:52.485147       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34550: use of closed network connection
	E0916 10:53:52.972588       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34588: use of closed network connection
	E0916 10:53:53.602153       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34646: use of closed network connection
	E0916 10:53:53.850431       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34666: use of closed network connection
	E0916 10:53:54.095517       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34690: use of closed network connection
	E0916 10:53:54.340408       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34702: use of closed network connection
	E0916 10:53:54.582226       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34734: use of closed network connection
	E0916 10:53:54.851029       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34748: use of closed network connection
	
	
	==> kube-controller-manager [a7002833ce71be5c884b6b01ea4b5b23ca5c4dbd6a84cfaeed6b4d3e9829e35b] <==
	I0916 10:53:21.166206       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="64.763µs"
	I0916 10:53:23.592099       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="56.373498ms"
	I0916 10:53:23.592345       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="205.816µs"
	I0916 10:53:23.758331       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="22.334736ms"
	I0916 10:53:23.792433       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="33.570342ms"
	I0916 10:53:23.794530       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="1.638185ms"
	I0916 10:53:24.744495       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="76.439µs"
	I0916 10:53:25.590578       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m03"
	I0916 10:53:41.904648       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m02"
	I0916 10:53:49.302045       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759"
	I0916 10:53:50.414741       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="58.307681ms"
	I0916 10:53:50.499441       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="84.648734ms"
	I0916 10:53:50.499782       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="65.657µs"
	I0916 10:53:56.200551       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m03"
	E0916 10:54:12.039568       1 certificate_controller.go:151] "Unhandled Error" err="Sync csr-q5rd8 failed with : error updating signature for csr: Operation cannot be fulfilled on certificatesigningrequests.certificates.k8s.io \"csr-q5rd8\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	I0916 10:54:12.223078       1 actual_state_of_world.go:540] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"ha-234759-m04\" does not exist"
	I0916 10:54:12.277872       1 range_allocator.go:422] "Set node PodCIDR" logger="node-ipam-controller" node="ha-234759-m04" podCIDRs=["10.244.3.0/24"]
	I0916 10:54:12.278590       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:12.278799       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:12.685159       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:13.216047       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:13.275364       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:13.275441       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="ha-234759-m04"
	I0916 10:54:13.299972       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:15.629918       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="ha-234759-m04"
	
	
	==> kube-proxy [900d2ad5148fe65aefe93ce1d29763ab71494f94b5511bca347f273235ccc038] <==
	I0916 10:51:52.177892       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:51:52.290574       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:51:52.290638       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:51:52.352222       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:51:52.352350       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:51:52.354429       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:51:52.355024       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:51:52.355194       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:51:52.362196       1 config.go:199] "Starting service config controller"
	I0916 10:51:52.362316       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:51:52.362400       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:51:52.362458       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:51:52.363411       1 config.go:328] "Starting node config controller"
	I0916 10:51:52.365650       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:51:52.462531       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:51:52.462632       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:51:52.471949       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [5a7d53b11a05f48872cdd02a26e2074bdb1c6ee6e353ceb2ff9519faca117d67] <==
	E0916 10:53:20.488702       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-kjr9x\": pod busybox-7dff88458-kjr9x is already assigned to node \"ha-234759\"" pod="default/busybox-7dff88458-kjr9x"
	I0916 10:53:20.488719       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-7dff88458-kjr9x" node="ha-234759"
	E0916 10:53:20.489597       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-m9lsb\": pod busybox-7dff88458-m9lsb is already assigned to node \"ha-234759-m03\"" plugin="DefaultBinder" pod="default/busybox-7dff88458-m9lsb" node="ha-234759-m03"
	E0916 10:53:20.489638       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod af6e1f4d-184c-4d9d-bed7-b49448f6daa9(default/busybox-7dff88458-m9lsb) wasn't assumed so cannot be forgotten" pod="default/busybox-7dff88458-m9lsb"
	E0916 10:53:20.489651       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-m9lsb\": pod busybox-7dff88458-m9lsb is already assigned to node \"ha-234759-m03\"" pod="default/busybox-7dff88458-m9lsb"
	I0916 10:53:20.489677       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-7dff88458-m9lsb" node="ha-234759-m03"
	E0916 10:54:12.393673       1 schedule_one.go:953] "Scheduler cache AssumePod failed" err="pod 10919f4b-06e2-4ba9-8ed7-6a6493352be5(kube-system/kube-proxy-xscmm) is in the cache, so can't be assumed" pod="kube-system/kube-proxy-xscmm"
	E0916 10:54:12.393715       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="pod 10919f4b-06e2-4ba9-8ed7-6a6493352be5(kube-system/kube-proxy-xscmm) is in the cache, so can't be assumed" pod="kube-system/kube-proxy-xscmm"
	I0916 10:54:12.393736       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kube-proxy-xscmm" node="ha-234759-m04"
	E0916 10:54:12.419351       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kindnet-sk6c5\": pod kindnet-sk6c5 is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="kube-system/kindnet-sk6c5" node="ha-234759-m04"
	E0916 10:54:12.419403       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 021eac3e-1cf0-40c0-a4e8-6bfe73a62a75(kube-system/kindnet-sk6c5) wasn't assumed so cannot be forgotten" pod="kube-system/kindnet-sk6c5"
	E0916 10:54:12.419578       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kindnet-sk6c5\": pod kindnet-sk6c5 is already assigned to node \"ha-234759-m04\"" pod="kube-system/kindnet-sk6c5"
	I0916 10:54:12.419725       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kindnet-sk6c5" node="ha-234759-m04"
	E0916 10:54:12.434533       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kube-proxy-m84xg\": pod kube-proxy-m84xg is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="kube-system/kube-proxy-m84xg" node="ha-234759-m04"
	E0916 10:54:12.434587       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 6b84ade0-ae34-4c7e-b1ed-e9c83f4bf130(kube-system/kube-proxy-m84xg) wasn't assumed so cannot be forgotten" pod="kube-system/kube-proxy-m84xg"
	E0916 10:54:12.434606       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kube-proxy-m84xg\": pod kube-proxy-m84xg is already assigned to node \"ha-234759-m04\"" pod="kube-system/kube-proxy-m84xg"
	I0916 10:54:12.434653       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kube-proxy-m84xg" node="ha-234759-m04"
	E0916 10:54:12.656470       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kindnet-c59dr\": pod kindnet-c59dr is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="kube-system/kindnet-c59dr" node="ha-234759-m04"
	E0916 10:54:12.656573       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 929f6efd-2b9a-4b18-919d-36fc692d45c4(kube-system/kindnet-c59dr) wasn't assumed so cannot be forgotten" pod="kube-system/kindnet-c59dr"
	E0916 10:54:12.656663       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kindnet-c59dr\": pod kindnet-c59dr is already assigned to node \"ha-234759-m04\"" pod="kube-system/kindnet-c59dr"
	I0916 10:54:12.656746       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kindnet-c59dr" node="ha-234759-m04"
	E0916 10:54:12.678394       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kube-proxy-zcn6b\": pod kube-proxy-zcn6b is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="kube-system/kube-proxy-zcn6b" node="ha-234759-m04"
	E0916 10:54:12.678469       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 66cac608-b205-4573-afb9-4e337fdadf3c(kube-system/kube-proxy-zcn6b) wasn't assumed so cannot be forgotten" pod="kube-system/kube-proxy-zcn6b"
	E0916 10:54:12.678502       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kube-proxy-zcn6b\": pod kube-proxy-zcn6b is already assigned to node \"ha-234759-m04\"" pod="kube-system/kube-proxy-zcn6b"
	I0916 10:54:12.678547       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kube-proxy-zcn6b" node="ha-234759-m04"
	
	
	==> kubelet <==
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.419239    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ee79653f-8a9e-41e2-82bb-7b08fc87e265-lib-modules\") pod \"kindnet-q8nl6\" (UID: \"ee79653f-8a9e-41e2-82bb-7b08fc87e265\") " pod="kube-system/kindnet-q8nl6"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.419287    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw9tx\" (UniqueName: \"kubernetes.io/projected/8ea118a7-cc54-4dd9-8bb2-cfc133a376fc-kube-api-access-dw9tx\") pod \"kube-proxy-gwdl4\" (UID: \"8ea118a7-cc54-4dd9-8bb2-cfc133a376fc\") " pod="kube-system/kube-proxy-gwdl4"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.520298    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/18d893a2-274a-413e-bf3d-0dd1e88a9984-config-volume\") pod \"coredns-7c65d6cfc9-2l4br\" (UID: \"18d893a2-274a-413e-bf3d-0dd1e88a9984\") " pod="kube-system/coredns-7c65d6cfc9-2l4br"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.520433    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c14618b-0831-4e8b-be9e-ba0049031bdb-config-volume\") pod \"coredns-7c65d6cfc9-vqj8q\" (UID: \"2c14618b-0831-4e8b-be9e-ba0049031bdb\") " pod="kube-system/coredns-7c65d6cfc9-vqj8q"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.520456    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gfn5\" (UniqueName: \"kubernetes.io/projected/18d893a2-274a-413e-bf3d-0dd1e88a9984-kube-api-access-2gfn5\") pod \"coredns-7c65d6cfc9-2l4br\" (UID: \"18d893a2-274a-413e-bf3d-0dd1e88a9984\") " pod="kube-system/coredns-7c65d6cfc9-2l4br"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.520480    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pkdh\" (UniqueName: \"kubernetes.io/projected/2c14618b-0831-4e8b-be9e-ba0049031bdb-kube-api-access-4pkdh\") pod \"coredns-7c65d6cfc9-vqj8q\" (UID: \"2c14618b-0831-4e8b-be9e-ba0049031bdb\") " pod="kube-system/coredns-7c65d6cfc9-vqj8q"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.555785    1577 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.870339    1577 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\": failed to find network info for sandbox \"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\""
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.870422    1577 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\": failed to find network info for sandbox \"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\"" pod="kube-system/coredns-7c65d6cfc9-vqj8q"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.870444    1577 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\": failed to find network info for sandbox \"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\"" pod="kube-system/coredns-7c65d6cfc9-vqj8q"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.870487    1577 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-7c65d6cfc9-vqj8q_kube-system(2c14618b-0831-4e8b-be9e-ba0049031bdb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-7c65d6cfc9-vqj8q_kube-system(2c14618b-0831-4e8b-be9e-ba0049031bdb)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\\\": failed to find network info for sandbox \\\"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\\\"\"" pod="kube-system/coredns-7c65d6cfc9-vqj8q" podUID="2c14618b-0831-4e8b-be9e-ba0049031bdb"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.903598    1577 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\": failed to find network info for sandbox \"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\""
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.903661    1577 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\": failed to find network info for sandbox \"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\"" pod="kube-system/coredns-7c65d6cfc9-2l4br"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.903683    1577 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\": failed to find network info for sandbox \"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\"" pod="kube-system/coredns-7c65d6cfc9-2l4br"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.903739    1577 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-7c65d6cfc9-2l4br_kube-system(18d893a2-274a-413e-bf3d-0dd1e88a9984)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-7c65d6cfc9-2l4br_kube-system(18d893a2-274a-413e-bf3d-0dd1e88a9984)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\\\": failed to find network info for sandbox \\\"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\\\"\"" pod="kube-system/coredns-7c65d6cfc9-2l4br" podUID="18d893a2-274a-413e-bf3d-0dd1e88a9984"
	Sep 16 10:51:52 ha-234759 kubelet[1577]: I0916 10:51:52.427403    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/e8924914-9ba5-4adc-ac46-9d3d97b0bc08-tmp\") pod \"storage-provisioner\" (UID: \"e8924914-9ba5-4adc-ac46-9d3d97b0bc08\") " pod="kube-system/storage-provisioner"
	Sep 16 10:51:52 ha-234759 kubelet[1577]: I0916 10:51:52.427993    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8mz2\" (UniqueName: \"kubernetes.io/projected/e8924914-9ba5-4adc-ac46-9d3d97b0bc08-kube-api-access-s8mz2\") pod \"storage-provisioner\" (UID: \"e8924914-9ba5-4adc-ac46-9d3d97b0bc08\") " pod="kube-system/storage-provisioner"
	Sep 16 10:51:52 ha-234759 kubelet[1577]: I0916 10:51:52.466980    1577 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-proxy-gwdl4" podStartSLOduration=1.466960435 podStartE2EDuration="1.466960435s" podCreationTimestamp="2024-09-16 10:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:51:52.466745839 +0000 UTC m=+6.217173255" watchObservedRunningTime="2024-09-16 10:51:52.466960435 +0000 UTC m=+6.217387900"
	Sep 16 10:51:53 ha-234759 kubelet[1577]: I0916 10:51:53.482284    1577 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kindnet-q8nl6" podStartSLOduration=2.482261961 podStartE2EDuration="2.482261961s" podCreationTimestamp="2024-09-16 10:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:51:52.512634019 +0000 UTC m=+6.263061435" watchObservedRunningTime="2024-09-16 10:51:53.482261961 +0000 UTC m=+7.232689377"
	Sep 16 10:51:53 ha-234759 kubelet[1577]: I0916 10:51:53.502715    1577 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/storage-provisioner" podStartSLOduration=1.502654063 podStartE2EDuration="1.502654063s" podCreationTimestamp="2024-09-16 10:51:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:51:53.48355399 +0000 UTC m=+7.233981414" watchObservedRunningTime="2024-09-16 10:51:53.502654063 +0000 UTC m=+7.253081478"
	Sep 16 10:51:57 ha-234759 kubelet[1577]: I0916 10:51:57.010572    1577 kuberuntime_manager.go:1635] "Updating runtime config through cri with podcidr" CIDR="10.244.0.0/24"
	Sep 16 10:51:57 ha-234759 kubelet[1577]: I0916 10:51:57.011701    1577 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Sep 16 10:52:04 ha-234759 kubelet[1577]: I0916 10:52:04.547013    1577 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/coredns-7c65d6cfc9-vqj8q" podStartSLOduration=13.546981888 podStartE2EDuration="13.546981888s" podCreationTimestamp="2024-09-16 10:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:52:04.522354317 +0000 UTC m=+18.272781733" watchObservedRunningTime="2024-09-16 10:52:04.546981888 +0000 UTC m=+18.297409304"
	Sep 16 10:52:05 ha-234759 kubelet[1577]: I0916 10:52:05.564407    1577 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/coredns-7c65d6cfc9-2l4br" podStartSLOduration=14.564378665 podStartE2EDuration="14.564378665s" podCreationTimestamp="2024-09-16 10:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:52:05.5311841 +0000 UTC m=+19.281611524" watchObservedRunningTime="2024-09-16 10:52:05.564378665 +0000 UTC m=+19.314806081"
	Sep 16 10:53:20 ha-234759 kubelet[1577]: I0916 10:53:20.569129    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdx8v\" (UniqueName: \"kubernetes.io/projected/18a7f530-b34d-413a-9028-13511f5b9be6-kube-api-access-wdx8v\") pod \"busybox-7dff88458-kjr9x\" (UID: \"18a7f530-b34d-413a-9028-13511f5b9be6\") " pod="default/busybox-7dff88458-kjr9x"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p ha-234759 -n ha-234759
helpers_test.go:261: (dbg) Run:  kubectl --context ha-234759 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context ha-234759 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (563.746µs)
helpers_test.go:263: kubectl --context ha-234759 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiControlPlane/serial/NodeLabels (3.05s)

                                                
                                    
x
+
TestMultiControlPlane/serial/RestartSecondaryNode (21.47s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/RestartSecondaryNode
ha_test.go:420: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 node start m02 -v=7 --alsologtostderr
ha_test.go:420: (dbg) Done: out/minikube-linux-arm64 -p ha-234759 node start m02 -v=7 --alsologtostderr: (17.356578889s)
ha_test.go:428: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 status -v=7 --alsologtostderr
ha_test.go:428: (dbg) Done: out/minikube-linux-arm64 -p ha-234759 status -v=7 --alsologtostderr: (1.163557793s)
ha_test.go:448: (dbg) Run:  kubectl get nodes
ha_test.go:448: (dbg) Non-zero exit: kubectl get nodes: fork/exec /usr/local/bin/kubectl: exec format error (824.553µs)
ha_test.go:450: failed to kubectl get nodes. args "kubectl get nodes" : fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiControlPlane/serial/RestartSecondaryNode]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect ha-234759
helpers_test.go:235: (dbg) docker inspect ha-234759:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59",
	        "Created": "2024-09-16T10:51:26.447161448Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2101278,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:51:26.59587884Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/hostname",
	        "HostsPath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/hosts",
	        "LogPath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59-json.log",
	        "Name": "/ha-234759",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "ha-234759:/var",
	                "/lib/modules:/lib/modules:ro"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "ha-234759",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613/merged",
	                "UpperDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613/diff",
	                "WorkDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "volume",
	                "Name": "ha-234759",
	                "Source": "/var/lib/docker/volumes/ha-234759/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            },
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            }
	        ],
	        "Config": {
	            "Hostname": "ha-234759",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "ha-234759",
	                "name.minikube.sigs.k8s.io": "ha-234759",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "a4bc0758e88359f4099794ca40e8f9323bb5644b881b9b4ad2307dab2c5abb00",
	            "SandboxKey": "/var/run/docker/netns/a4bc0758e883",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40597"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40598"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40601"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40599"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40600"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "ha-234759": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "941929ec13d1e6034904933d29100a93cf04d9e6a30844d8d0c54e3a464c32cd",
	                    "EndpointID": "68222693ea4e7622a2bdfb3001db23b54719fbd194eac21b546910cc3e2062bd",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "ha-234759",
	                        "6306ac5a5985"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p ha-234759 -n ha-234759
helpers_test.go:244: <<< TestMultiControlPlane/serial/RestartSecondaryNode FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiControlPlane/serial/RestartSecondaryNode]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p ha-234759 logs -n 25: (1.74351807s)
helpers_test.go:252: TestMultiControlPlane/serial/RestartSecondaryNode logs: 
-- stdout --
	
	==> Audit <==
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	| Command |                                       Args                                       |  Profile  |  User   | Version |     Start Time      |      End Time       |
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m03:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759:/home/docker/cp-test_ha-234759-m03_ha-234759.txt                       |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759 sudo cat                                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m03_ha-234759.txt                                 |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m03:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m02:/home/docker/cp-test_ha-234759-m03_ha-234759-m02.txt               |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759-m02 sudo cat                                          | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m03_ha-234759-m02.txt                             |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m03:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04:/home/docker/cp-test_ha-234759-m03_ha-234759-m04.txt               |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759-m04 sudo cat                                          | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m03_ha-234759-m04.txt                             |           |         |         |                     |                     |
	| cp      | ha-234759 cp testdata/cp-test.txt                                                | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04:/home/docker/cp-test.txt                                           |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /tmp/TestMultiControlPlaneserialCopyFile3470256434/001/cp-test_ha-234759-m04.txt |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759:/home/docker/cp-test_ha-234759-m04_ha-234759.txt                       |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759 sudo cat                                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m04_ha-234759.txt                                 |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m02:/home/docker/cp-test_ha-234759-m04_ha-234759-m02.txt               |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759-m02 sudo cat                                          | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m04_ha-234759-m02.txt                             |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m03:/home/docker/cp-test_ha-234759-m04_ha-234759-m03.txt               |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759-m03 sudo cat                                          | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m04_ha-234759-m03.txt                             |           |         |         |                     |                     |
	| node    | ha-234759 node stop m02 -v=7                                                     | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | ha-234759 node start m02 -v=7                                                    | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:55 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:51:21
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:51:21.236740 2100790 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:51:21.236879 2100790 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:21.236887 2100790 out.go:358] Setting ErrFile to fd 2...
	I0916 10:51:21.236893 2100790 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:51:21.237140 2100790 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:51:21.237558 2100790 out.go:352] Setting JSON to false
	I0916 10:51:21.238489 2100790 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":138824,"bootTime":1726345058,"procs":173,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:51:21.238569 2100790 start.go:139] virtualization:  
	I0916 10:51:21.241135 2100790 out.go:177] * [ha-234759] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:51:21.243355 2100790 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:51:21.243404 2100790 notify.go:220] Checking for updates...
	I0916 10:51:21.247599 2100790 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:51:21.249305 2100790 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:51:21.251096 2100790 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:51:21.252852 2100790 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:51:21.254707 2100790 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:51:21.256640 2100790 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:51:21.284750 2100790 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:51:21.284894 2100790 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:21.353436 2100790 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:41 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 10:51:21.343927486 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:21.353553 2100790 docker.go:318] overlay module found
	I0916 10:51:21.355603 2100790 out.go:177] * Using the docker driver based on user configuration
	I0916 10:51:21.357141 2100790 start.go:297] selected driver: docker
	I0916 10:51:21.357154 2100790 start.go:901] validating driver "docker" against <nil>
	I0916 10:51:21.357168 2100790 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:51:21.357874 2100790 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:51:21.410168 2100790 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:41 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 10:51:21.400729656 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:51:21.410385 2100790 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:51:21.410615 2100790 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:51:21.412383 2100790 out.go:177] * Using Docker driver with root privileges
	I0916 10:51:21.413806 2100790 cni.go:84] Creating CNI manager for ""
	I0916 10:51:21.413874 2100790 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0916 10:51:21.413893 2100790 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:51:21.413976 2100790 start.go:340] cluster config:
	{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:contain
erd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock:
SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:21.416766 2100790 out.go:177] * Starting "ha-234759" primary control-plane node in "ha-234759" cluster
	I0916 10:51:21.418419 2100790 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:51:21.420056 2100790 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:51:21.421575 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:51:21.421630 2100790 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:51:21.421642 2100790 cache.go:56] Caching tarball of preloaded images
	I0916 10:51:21.421667 2100790 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:51:21.421729 2100790 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:51:21.421740 2100790 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:51:21.422099 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:51:21.422139 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json: {Name:mk1338431a5f691abe348219d4be1bbe2ed8cc31 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	W0916 10:51:21.441371 2100790 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:51:21.441393 2100790 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:51:21.441539 2100790 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:51:21.441563 2100790 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:51:21.441579 2100790 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:51:21.441588 2100790 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:51:21.441611 2100790 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:51:21.443229 2100790 image.go:273] response: 
	I0916 10:51:21.559332 2100790 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:51:21.559372 2100790 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:51:21.559403 2100790 start.go:360] acquireMachinesLock for ha-234759: {Name:mk07434fa5fb218c324ac4567510c65c6e772f63 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:51:21.559529 2100790 start.go:364] duration metric: took 106.461µs to acquireMachinesLock for "ha-234759"
	I0916 10:51:21.559560 2100790 start.go:93] Provisioning new machine with config: &{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP: APIServerName:minikubeCA
APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false Custo
mQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:51:21.559641 2100790 start.go:125] createHost starting for "" (driver="docker")
	I0916 10:51:21.561629 2100790 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 10:51:21.561866 2100790 start.go:159] libmachine.API.Create for "ha-234759" (driver="docker")
	I0916 10:51:21.561908 2100790 client.go:168] LocalClient.Create starting
	I0916 10:51:21.562009 2100790 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 10:51:21.562046 2100790 main.go:141] libmachine: Decoding PEM data...
	I0916 10:51:21.562065 2100790 main.go:141] libmachine: Parsing certificate...
	I0916 10:51:21.562129 2100790 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 10:51:21.562154 2100790 main.go:141] libmachine: Decoding PEM data...
	I0916 10:51:21.562170 2100790 main.go:141] libmachine: Parsing certificate...
	I0916 10:51:21.562541 2100790 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 10:51:21.577993 2100790 cli_runner.go:211] docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 10:51:21.578076 2100790 network_create.go:284] running [docker network inspect ha-234759] to gather additional debugging logs...
	I0916 10:51:21.578098 2100790 cli_runner.go:164] Run: docker network inspect ha-234759
	W0916 10:51:21.593488 2100790 cli_runner.go:211] docker network inspect ha-234759 returned with exit code 1
	I0916 10:51:21.593523 2100790 network_create.go:287] error running [docker network inspect ha-234759]: docker network inspect ha-234759: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network ha-234759 not found
	I0916 10:51:21.593537 2100790 network_create.go:289] output of [docker network inspect ha-234759]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network ha-234759 not found
	
	** /stderr **
	I0916 10:51:21.593648 2100790 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:51:21.609508 2100790 network.go:206] using free private subnet 192.168.49.0/24: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x400164dff0}
	I0916 10:51:21.609552 2100790 network_create.go:124] attempt to create docker network ha-234759 192.168.49.0/24 with gateway 192.168.49.1 and MTU of 1500 ...
	I0916 10:51:21.609615 2100790 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.49.0/24 --gateway=192.168.49.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=ha-234759 ha-234759
	I0916 10:51:21.679986 2100790 network_create.go:108] docker network ha-234759 192.168.49.0/24 created
	I0916 10:51:21.680022 2100790 kic.go:121] calculated static IP "192.168.49.2" for the "ha-234759" container
	I0916 10:51:21.680096 2100790 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:51:21.696483 2100790 cli_runner.go:164] Run: docker volume create ha-234759 --label name.minikube.sigs.k8s.io=ha-234759 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:51:21.711932 2100790 oci.go:103] Successfully created a docker volume ha-234759
	I0916 10:51:21.712019 2100790 cli_runner.go:164] Run: docker run --rm --name ha-234759-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-234759 --entrypoint /usr/bin/test -v ha-234759:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:51:22.321948 2100790 oci.go:107] Successfully prepared a docker volume ha-234759
	I0916 10:51:22.322000 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:51:22.322024 2100790 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:51:22.322098 2100790 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v ha-234759:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:51:26.380680 2100790 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v ha-234759:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.058525981s)
	I0916 10:51:26.380727 2100790 kic.go:203] duration metric: took 4.058699338s to extract preloaded images to volume ...
	W0916 10:51:26.380867 2100790 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:51:26.380980 2100790 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:51:26.433008 2100790 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname ha-234759 --name ha-234759 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-234759 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=ha-234759 --network ha-234759 --ip 192.168.49.2 --volume ha-234759:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:51:26.776518 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Running}}
	I0916 10:51:26.800922 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:51:26.824391 2100790 cli_runner.go:164] Run: docker exec ha-234759 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:51:26.885690 2100790 oci.go:144] the created container "ha-234759" has a running status.
	I0916 10:51:26.885718 2100790 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa...
	I0916 10:51:27.681475 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 10:51:27.681524 2100790 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:51:27.702247 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:51:27.734249 2100790 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:51:27.734269 2100790 kic_runner.go:114] Args: [docker exec --privileged ha-234759 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:51:27.800109 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:51:27.818823 2100790 machine.go:93] provisionDockerMachine start ...
	I0916 10:51:27.818911 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:27.847445 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:27.847725 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40597 <nil> <nil>}
	I0916 10:51:27.847735 2100790 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:51:27.995717 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759
	
	I0916 10:51:27.995740 2100790 ubuntu.go:169] provisioning hostname "ha-234759"
	I0916 10:51:27.995808 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:28.017912 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:28.018169 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40597 <nil> <nil>}
	I0916 10:51:28.018187 2100790 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-234759 && echo "ha-234759" | sudo tee /etc/hostname
	I0916 10:51:28.168561 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759
	
	I0916 10:51:28.168704 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:28.185728 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:28.185976 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40597 <nil> <nil>}
	I0916 10:51:28.185995 2100790 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-234759' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-234759/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-234759' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:51:28.322997 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:51:28.323026 2100790 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:51:28.323054 2100790 ubuntu.go:177] setting up certificates
	I0916 10:51:28.323066 2100790 provision.go:84] configureAuth start
	I0916 10:51:28.323133 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759
	I0916 10:51:28.340179 2100790 provision.go:143] copyHostCerts
	I0916 10:51:28.340229 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:51:28.340268 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:51:28.340280 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:51:28.340369 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:51:28.340463 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:51:28.340485 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:51:28.340490 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:51:28.340526 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:51:28.340578 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:51:28.340599 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:51:28.340610 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:51:28.340638 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:51:28.340701 2100790 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.ha-234759 san=[127.0.0.1 192.168.49.2 ha-234759 localhost minikube]
	I0916 10:51:28.658968 2100790 provision.go:177] copyRemoteCerts
	I0916 10:51:28.659040 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:51:28.659083 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:28.675856 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:28.775958 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:51:28.776026 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:51:28.800914 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:51:28.800979 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1196 bytes)
	I0916 10:51:28.825064 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:51:28.825129 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:51:28.849401 2100790 provision.go:87] duration metric: took 526.307956ms to configureAuth
	I0916 10:51:28.849431 2100790 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:51:28.849623 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:51:28.849637 2100790 machine.go:96] duration metric: took 1.030796915s to provisionDockerMachine
	I0916 10:51:28.849644 2100790 client.go:171] duration metric: took 7.287725736s to LocalClient.Create
	I0916 10:51:28.849658 2100790 start.go:167] duration metric: took 7.287792853s to libmachine.API.Create "ha-234759"
	I0916 10:51:28.849669 2100790 start.go:293] postStartSetup for "ha-234759" (driver="docker")
	I0916 10:51:28.849678 2100790 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:51:28.849734 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:51:28.849776 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:28.866397 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:28.967933 2100790 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:51:28.971430 2100790 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:51:28.971469 2100790 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:51:28.971503 2100790 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:51:28.971515 2100790 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:51:28.971527 2100790 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:51:28.971601 2100790 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:51:28.971690 2100790 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:51:28.971702 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:51:28.971816 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:51:28.980777 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:51:29.007858 2100790 start.go:296] duration metric: took 158.172578ms for postStartSetup
	I0916 10:51:29.008304 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759
	I0916 10:51:29.025527 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:51:29.025826 2100790 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:51:29.025880 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:29.042634 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:29.135593 2100790 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:51:29.139902 2100790 start.go:128] duration metric: took 7.580245826s to createHost
	I0916 10:51:29.139929 2100790 start.go:83] releasing machines lock for "ha-234759", held for 7.580385985s
	I0916 10:51:29.139999 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759
	I0916 10:51:29.157116 2100790 ssh_runner.go:195] Run: cat /version.json
	I0916 10:51:29.157159 2100790 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:51:29.157177 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:29.157227 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:29.174648 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:29.182986 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:29.266359 2100790 ssh_runner.go:195] Run: systemctl --version
	I0916 10:51:29.394985 2100790 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:51:29.399358 2100790 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:51:29.426234 2100790 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:51:29.426365 2100790 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:51:29.454426 2100790 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:51:29.454452 2100790 start.go:495] detecting cgroup driver to use...
	I0916 10:51:29.454484 2100790 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:51:29.454535 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:51:29.468955 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:51:29.484050 2100790 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:51:29.484149 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:51:29.498384 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:51:29.513595 2100790 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:51:29.623797 2100790 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:51:29.719649 2100790 docker.go:233] disabling docker service ...
	I0916 10:51:29.719789 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:51:29.742658 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:51:29.754648 2100790 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:51:29.851434 2100790 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:51:29.945582 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:51:29.957314 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:51:29.975118 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:51:29.986788 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:51:29.997691 2100790 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:51:29.997763 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:51:30.037137 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:51:30.051230 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:51:30.064188 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:51:30.078048 2100790 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:51:30.089944 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:51:30.103181 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:51:30.117182 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:51:30.130136 2100790 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:51:30.141349 2100790 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:51:30.151581 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:51:30.249574 2100790 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:51:30.374729 2100790 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:51:30.374852 2100790 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:51:30.378835 2100790 start.go:563] Will wait 60s for crictl version
	I0916 10:51:30.378986 2100790 ssh_runner.go:195] Run: which crictl
	I0916 10:51:30.382308 2100790 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:51:30.421985 2100790 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:51:30.422089 2100790 ssh_runner.go:195] Run: containerd --version
	I0916 10:51:30.443819 2100790 ssh_runner.go:195] Run: containerd --version
	I0916 10:51:30.469015 2100790 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:51:30.471164 2100790 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:51:30.485573 2100790 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:51:30.489292 2100790 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:51:30.500562 2100790 kubeadm.go:883] updating cluster {Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APISe
rverNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:fals
e CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:51:30.500689 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:51:30.500765 2100790 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:51:30.539014 2100790 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:51:30.539036 2100790 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:51:30.539099 2100790 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:51:30.576121 2100790 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:51:30.576144 2100790 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:51:30.576152 2100790 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 containerd true true} ...
	I0916 10:51:30.576251 2100790 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-234759 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:51:30.576319 2100790 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:51:30.612571 2100790 cni.go:84] Creating CNI manager for ""
	I0916 10:51:30.612656 2100790 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 10:51:30.612679 2100790 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:51:30.612734 2100790 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:ha-234759 NodeName:ha-234759 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kuberne
tes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:51:30.612917 2100790 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "ha-234759"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:51:30.612969 2100790 kube-vip.go:115] generating kube-vip config ...
	I0916 10:51:30.613057 2100790 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:51:30.625749 2100790 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:51:30.625852 2100790 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/super-admin.conf"
	    name: kubeconfig
	status: {}
	I0916 10:51:30.625916 2100790 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:51:30.635319 2100790 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:51:30.635394 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube /etc/kubernetes/manifests
	I0916 10:51:30.644381 2100790 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (313 bytes)
	I0916 10:51:30.663252 2100790 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:51:30.681992 2100790 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2163 bytes)
	I0916 10:51:30.700307 2100790 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1447 bytes)
	I0916 10:51:30.718380 2100790 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:51:30.721958 2100790 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:51:30.733158 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:51:30.830241 2100790 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:51:30.845600 2100790 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759 for IP: 192.168.49.2
	I0916 10:51:30.845675 2100790 certs.go:194] generating shared ca certs ...
	I0916 10:51:30.845705 2100790 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:30.845874 2100790 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:51:30.845952 2100790 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:51:30.845987 2100790 certs.go:256] generating profile certs ...
	I0916 10:51:30.846069 2100790 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key
	I0916 10:51:30.846123 2100790 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt with IP's: []
	I0916 10:51:31.363342 2100790 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt ...
	I0916 10:51:31.363377 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt: {Name:mka4287c7f3ffd0700fbbe62e3c68b161d88d3cd Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:31.363621 2100790 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key ...
	I0916 10:51:31.363636 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key: {Name:mkf7a053e687a2a442d5973b40203c0ed8dfdc9d Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:31.363738 2100790 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.a444410f
	I0916 10:51:31.363757 2100790 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.a444410f with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.254]
	I0916 10:51:31.666932 2100790 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.a444410f ...
	I0916 10:51:31.666964 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.a444410f: {Name:mk0cd492e2d5f75656dab8dfe27ecb072c6d1c85 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:31.667156 2100790 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.a444410f ...
	I0916 10:51:31.667170 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.a444410f: {Name:mk8222bc8b5c5c51d544f22dd1e577a3ae5bcd67 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:31.667261 2100790 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.a444410f -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt
	I0916 10:51:31.667349 2100790 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.a444410f -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key
	I0916 10:51:31.667411 2100790 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key
	I0916 10:51:31.667429 2100790 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt with IP's: []
	I0916 10:51:31.952861 2100790 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt ...
	I0916 10:51:31.952896 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt: {Name:mk9a094598ad4046a76a8dc2769982f6cbd4e0ed Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:31.953079 2100790 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key ...
	I0916 10:51:31.953099 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key: {Name:mkfafb6912138d9356c7537f9892bac83afdf10f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:31.953192 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:51:31.953211 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:51:31.953223 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:51:31.953238 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:51:31.953250 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:51:31.953266 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:51:31.953277 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:51:31.953291 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:51:31.953344 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:51:31.953382 2100790 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:51:31.953390 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:51:31.953414 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:51:31.953435 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:51:31.953462 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:51:31.953510 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:51:31.953544 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:51:31.953558 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:31.953569 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:51:31.954199 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:51:31.980348 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:51:32.008722 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:51:32.036840 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:51:32.062567 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 10:51:32.090284 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:51:32.115303 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:51:32.140697 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 10:51:32.166708 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:51:32.192277 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:51:32.217104 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:51:32.242554 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:51:32.262260 2100790 ssh_runner.go:195] Run: openssl version
	I0916 10:51:32.268152 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:51:32.278101 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:51:32.281770 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:51:32.281847 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:51:32.289061 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:51:32.298720 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:51:32.308514 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:32.312223 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:32.312294 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:51:32.319494 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:51:32.335867 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:51:32.346930 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:51:32.351420 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:51:32.351499 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:51:32.359680 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:51:32.370826 2100790 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:51:32.375023 2100790 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:51:32.375081 2100790 kubeadm.go:392] StartCluster: {Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServe
rNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false C
ustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:51:32.375171 2100790 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:51:32.375234 2100790 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:51:32.420260 2100790 cri.go:89] found id: ""
	I0916 10:51:32.420343 2100790 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:51:32.429525 2100790 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 10:51:32.438854 2100790 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 10:51:32.438927 2100790 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 10:51:32.447982 2100790 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 10:51:32.448007 2100790 kubeadm.go:157] found existing configuration files:
	
	I0916 10:51:32.448080 2100790 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 10:51:32.457032 2100790 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 10:51:32.457127 2100790 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 10:51:32.465953 2100790 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 10:51:32.475625 2100790 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 10:51:32.475726 2100790 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 10:51:32.485025 2100790 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 10:51:32.494669 2100790 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 10:51:32.494817 2100790 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 10:51:32.503811 2100790 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 10:51:32.513033 2100790 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 10:51:32.513125 2100790 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 10:51:32.522212 2100790 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 10:51:32.565920 2100790 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 10:51:32.566041 2100790 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 10:51:32.583353 2100790 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 10:51:32.583470 2100790 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 10:51:32.583532 2100790 kubeadm.go:310] OS: Linux
	I0916 10:51:32.583620 2100790 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 10:51:32.583696 2100790 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 10:51:32.583770 2100790 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 10:51:32.583836 2100790 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 10:51:32.583907 2100790 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 10:51:32.583973 2100790 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 10:51:32.584041 2100790 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 10:51:32.584114 2100790 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 10:51:32.584188 2100790 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 10:51:32.642643 2100790 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 10:51:32.642819 2100790 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 10:51:32.642953 2100790 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 10:51:32.650061 2100790 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 10:51:32.654287 2100790 out.go:235]   - Generating certificates and keys ...
	I0916 10:51:32.654479 2100790 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 10:51:32.654589 2100790 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 10:51:33.003688 2100790 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 10:51:33.229605 2100790 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 10:51:33.501301 2100790 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 10:51:34.304784 2100790 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 10:51:34.784062 2100790 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 10:51:34.784276 2100790 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [ha-234759 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:51:35.165100 2100790 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 10:51:35.165490 2100790 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [ha-234759 localhost] and IPs [192.168.49.2 127.0.0.1 ::1]
	I0916 10:51:35.346008 2100790 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 10:51:35.757653 2100790 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 10:51:35.965344 2100790 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 10:51:35.965628 2100790 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 10:51:36.123196 2100790 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 10:51:36.420436 2100790 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 10:51:36.592158 2100790 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 10:51:37.128675 2100790 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 10:51:37.485995 2100790 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 10:51:37.486713 2100790 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 10:51:37.489599 2100790 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 10:51:37.491884 2100790 out.go:235]   - Booting up control plane ...
	I0916 10:51:37.491996 2100790 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 10:51:37.492073 2100790 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 10:51:37.492682 2100790 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 10:51:37.518113 2100790 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 10:51:37.526950 2100790 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 10:51:37.527211 2100790 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 10:51:37.629200 2100790 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 10:51:37.629328 2100790 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 10:51:39.131106 2100790 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.501922222s
	I0916 10:51:39.131201 2100790 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 10:51:45.633042 2100790 kubeadm.go:310] [api-check] The API server is healthy after 6.501922883s
	I0916 10:51:45.655776 2100790 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 10:51:45.671991 2100790 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 10:51:45.701284 2100790 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 10:51:45.701481 2100790 kubeadm.go:310] [mark-control-plane] Marking the node ha-234759 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 10:51:45.716091 2100790 kubeadm.go:310] [bootstrap-token] Using token: bovj21.slnegqx7eegl94yl
	I0916 10:51:45.718447 2100790 out.go:235]   - Configuring RBAC rules ...
	I0916 10:51:45.718576 2100790 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 10:51:45.730907 2100790 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 10:51:45.742459 2100790 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 10:51:45.747476 2100790 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 10:51:45.752869 2100790 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 10:51:45.761822 2100790 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 10:51:46.041055 2100790 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 10:51:46.469705 2100790 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 10:51:47.040170 2100790 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 10:51:47.041362 2100790 kubeadm.go:310] 
	I0916 10:51:47.041441 2100790 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 10:51:47.041453 2100790 kubeadm.go:310] 
	I0916 10:51:47.041531 2100790 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 10:51:47.041540 2100790 kubeadm.go:310] 
	I0916 10:51:47.041565 2100790 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 10:51:47.041628 2100790 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 10:51:47.041684 2100790 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 10:51:47.041693 2100790 kubeadm.go:310] 
	I0916 10:51:47.041747 2100790 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 10:51:47.041755 2100790 kubeadm.go:310] 
	I0916 10:51:47.041803 2100790 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 10:51:47.041811 2100790 kubeadm.go:310] 
	I0916 10:51:47.041863 2100790 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 10:51:47.041942 2100790 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 10:51:47.042015 2100790 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 10:51:47.042023 2100790 kubeadm.go:310] 
	I0916 10:51:47.042114 2100790 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 10:51:47.042195 2100790 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 10:51:47.042204 2100790 kubeadm.go:310] 
	I0916 10:51:47.042288 2100790 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token bovj21.slnegqx7eegl94yl \
	I0916 10:51:47.042399 2100790 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 10:51:47.042425 2100790 kubeadm.go:310] 	--control-plane 
	I0916 10:51:47.042435 2100790 kubeadm.go:310] 
	I0916 10:51:47.042520 2100790 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 10:51:47.042529 2100790 kubeadm.go:310] 
	I0916 10:51:47.042612 2100790 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token bovj21.slnegqx7eegl94yl \
	I0916 10:51:47.042749 2100790 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 10:51:47.047630 2100790 kubeadm.go:310] W0916 10:51:32.559735    1055 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:51:47.047960 2100790 kubeadm.go:310] W0916 10:51:32.561183    1055 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 10:51:47.048238 2100790 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 10:51:47.048384 2100790 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 10:51:47.048417 2100790 cni.go:84] Creating CNI manager for ""
	I0916 10:51:47.048425 2100790 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 10:51:47.050384 2100790 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 10:51:47.051909 2100790 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 10:51:47.056111 2100790 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 10:51:47.056132 2100790 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 10:51:47.078806 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 10:51:47.373387 2100790 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 10:51:47.373527 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:47.373625 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-234759 minikube.k8s.io/updated_at=2024_09_16T10_51_47_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=ha-234759 minikube.k8s.io/primary=true
	I0916 10:51:47.590409 2100790 ops.go:34] apiserver oom_adj: -16
	I0916 10:51:47.590562 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:48.091656 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:48.591260 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:49.090712 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:49.590628 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:50.090756 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:50.590704 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:51.091550 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 10:51:51.200211 2100790 kubeadm.go:1113] duration metric: took 3.826732027s to wait for elevateKubeSystemPrivileges
	I0916 10:51:51.200246 2100790 kubeadm.go:394] duration metric: took 18.82517119s to StartCluster
	I0916 10:51:51.200264 2100790 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:51.200331 2100790 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:51:51.201094 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:51:51.201326 2100790 start.go:233] HA (multi-control plane) cluster: will skip waiting for primary control-plane node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:51:51.201357 2100790 start.go:241] waiting for startup goroutines ...
	I0916 10:51:51.201373 2100790 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:51:51.201437 2100790 addons.go:69] Setting storage-provisioner=true in profile "ha-234759"
	I0916 10:51:51.201456 2100790 addons.go:234] Setting addon storage-provisioner=true in "ha-234759"
	I0916 10:51:51.201484 2100790 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:51:51.202013 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:51:51.202334 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 10:51:51.202685 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:51:51.202750 2100790 addons.go:69] Setting default-storageclass=true in profile "ha-234759"
	I0916 10:51:51.202767 2100790 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "ha-234759"
	I0916 10:51:51.203045 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:51:51.232784 2100790 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:51:51.233068 2100790 kapi.go:59] client config for ha-234759: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]strin
g(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:51:51.233563 2100790 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 10:51:51.233711 2100790 addons.go:234] Setting addon default-storageclass=true in "ha-234759"
	I0916 10:51:51.233747 2100790 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:51:51.234206 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:51:51.245576 2100790 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 10:51:51.247765 2100790 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:51:51.247791 2100790 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 10:51:51.247857 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:51.276395 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:51.288216 2100790 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 10:51:51.288242 2100790 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 10:51:51.288307 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:51:51.323194 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:51:51.442942 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.49.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 10:51:51.578745 2100790 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 10:51:51.586010 2100790 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 10:51:52.029810 2100790 start.go:971] {"host.minikube.internal": 192.168.49.1} host record injected into CoreDNS's ConfigMap
	I0916 10:51:52.029991 2100790 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 10:51:52.030051 2100790 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 10:51:52.030174 2100790 round_trippers.go:463] GET https://192.168.49.254:8443/apis/storage.k8s.io/v1/storageclasses
	I0916 10:51:52.030199 2100790 round_trippers.go:469] Request Headers:
	I0916 10:51:52.030220 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:51:52.030237 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:51:52.045481 2100790 round_trippers.go:574] Response Status: 200 OK in 15 milliseconds
	I0916 10:51:52.046320 2100790 round_trippers.go:463] PUT https://192.168.49.254:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0916 10:51:52.046381 2100790 round_trippers.go:469] Request Headers:
	I0916 10:51:52.046403 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:51:52.046419 2100790 round_trippers.go:473]     Content-Type: application/json
	I0916 10:51:52.046454 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:51:52.049300 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:51:52.334786 2100790 out.go:177] * Enabled addons: default-storageclass, storage-provisioner
	I0916 10:51:52.337300 2100790 addons.go:510] duration metric: took 1.135922915s for enable addons: enabled=[default-storageclass storage-provisioner]
	I0916 10:51:52.337397 2100790 start.go:246] waiting for cluster config update ...
	I0916 10:51:52.337434 2100790 start.go:255] writing updated cluster config ...
	I0916 10:51:52.339645 2100790 out.go:201] 
	I0916 10:51:52.342478 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:51:52.342628 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:51:52.345664 2100790 out.go:177] * Starting "ha-234759-m02" control-plane node in "ha-234759" cluster
	I0916 10:51:52.348517 2100790 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:51:52.351782 2100790 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:51:52.355357 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:51:52.355473 2100790 cache.go:56] Caching tarball of preloaded images
	I0916 10:51:52.355439 2100790 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:51:52.355802 2100790 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:51:52.355838 2100790 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:51:52.355993 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	W0916 10:51:52.384717 2100790 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:51:52.384735 2100790 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:51:52.384821 2100790 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:51:52.384838 2100790 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:51:52.384844 2100790 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:51:52.384852 2100790 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:51:52.384857 2100790 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:51:52.386386 2100790 image.go:273] response: 
	I0916 10:51:52.505271 2100790 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:51:52.505311 2100790 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:51:52.505343 2100790 start.go:360] acquireMachinesLock for ha-234759-m02: {Name:mk8d038416b8f502330f7520e1c7f720d49da587 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:51:52.505459 2100790 start.go:364] duration metric: took 94.506µs to acquireMachinesLock for "ha-234759-m02"
	I0916 10:51:52.505489 2100790 start.go:93] Provisioning new machine with config: &{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9
PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:51:52.505574 2100790 start.go:125] createHost starting for "m02" (driver="docker")
	I0916 10:51:52.508620 2100790 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 10:51:52.508741 2100790 start.go:159] libmachine.API.Create for "ha-234759" (driver="docker")
	I0916 10:51:52.508770 2100790 client.go:168] LocalClient.Create starting
	I0916 10:51:52.508832 2100790 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 10:51:52.508865 2100790 main.go:141] libmachine: Decoding PEM data...
	I0916 10:51:52.508881 2100790 main.go:141] libmachine: Parsing certificate...
	I0916 10:51:52.508937 2100790 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 10:51:52.508954 2100790 main.go:141] libmachine: Decoding PEM data...
	I0916 10:51:52.508963 2100790 main.go:141] libmachine: Parsing certificate...
	I0916 10:51:52.509203 2100790 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:51:52.532873 2100790 network_create.go:77] Found existing network {name:ha-234759 subnet:0x40016d38f0 gateway:[0 0 0 0 0 0 0 0 0 0 255 255 192 168 49 1] mtu:1500}
	I0916 10:51:52.532927 2100790 kic.go:121] calculated static IP "192.168.49.3" for the "ha-234759-m02" container
	I0916 10:51:52.533072 2100790 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:51:52.554402 2100790 cli_runner.go:164] Run: docker volume create ha-234759-m02 --label name.minikube.sigs.k8s.io=ha-234759-m02 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:51:52.580882 2100790 oci.go:103] Successfully created a docker volume ha-234759-m02
	I0916 10:51:52.580970 2100790 cli_runner.go:164] Run: docker run --rm --name ha-234759-m02-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-234759-m02 --entrypoint /usr/bin/test -v ha-234759-m02:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:51:53.343000 2100790 oci.go:107] Successfully prepared a docker volume ha-234759-m02
	I0916 10:51:53.343050 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:51:53.343072 2100790 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:51:53.343147 2100790 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v ha-234759-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:51:57.361868 2100790 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v ha-234759-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.018676781s)
	I0916 10:51:57.361899 2100790 kic.go:203] duration metric: took 4.018823003s to extract preloaded images to volume ...
	W0916 10:51:57.362036 2100790 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:51:57.362180 2100790 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:51:57.425571 2100790 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname ha-234759-m02 --name ha-234759-m02 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-234759-m02 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=ha-234759-m02 --network ha-234759 --ip 192.168.49.3 --volume ha-234759-m02:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:51:57.764960 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Running}}
	I0916 10:51:57.783068 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Status}}
	I0916 10:51:57.811190 2100790 cli_runner.go:164] Run: docker exec ha-234759-m02 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:51:57.881077 2100790 oci.go:144] the created container "ha-234759-m02" has a running status.
	I0916 10:51:57.881102 2100790 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa...
	I0916 10:51:58.446855 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 10:51:58.446898 2100790 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:51:58.472337 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Status}}
	I0916 10:51:58.494908 2100790 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:51:58.494928 2100790 kic_runner.go:114] Args: [docker exec --privileged ha-234759-m02 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:51:58.602397 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Status}}
	I0916 10:51:58.622640 2100790 machine.go:93] provisionDockerMachine start ...
	I0916 10:51:58.622764 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:51:58.646223 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:58.646502 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40602 <nil> <nil>}
	I0916 10:51:58.646518 2100790 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:51:58.798362 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m02
	
	I0916 10:51:58.798388 2100790 ubuntu.go:169] provisioning hostname "ha-234759-m02"
	I0916 10:51:58.798460 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:51:58.819627 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:58.819965 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40602 <nil> <nil>}
	I0916 10:51:58.820003 2100790 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-234759-m02 && echo "ha-234759-m02" | sudo tee /etc/hostname
	I0916 10:51:58.985423 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m02
	
	I0916 10:51:58.985503 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:51:59.009340 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:51:59.009597 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40602 <nil> <nil>}
	I0916 10:51:59.009621 2100790 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-234759-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-234759-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-234759-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:51:59.147069 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:51:59.147103 2100790 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:51:59.147119 2100790 ubuntu.go:177] setting up certificates
	I0916 10:51:59.147128 2100790 provision.go:84] configureAuth start
	I0916 10:51:59.147189 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m02
	I0916 10:51:59.163758 2100790 provision.go:143] copyHostCerts
	I0916 10:51:59.163806 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:51:59.164072 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:51:59.164087 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:51:59.164170 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:51:59.164252 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:51:59.164269 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:51:59.164274 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:51:59.164298 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:51:59.164538 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:51:59.164570 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:51:59.164576 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:51:59.164618 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:51:59.164691 2100790 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.ha-234759-m02 san=[127.0.0.1 192.168.49.3 ha-234759-m02 localhost minikube]
	I0916 10:51:59.800624 2100790 provision.go:177] copyRemoteCerts
	I0916 10:51:59.800701 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:51:59.800744 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:51:59.817936 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40602 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:51:59.915681 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:51:59.915747 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:51:59.940507 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:51:59.940571 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:51:59.964933 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:51:59.964996 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:51:59.990554 2100790 provision.go:87] duration metric: took 843.41239ms to configureAuth
	I0916 10:51:59.990623 2100790 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:51:59.990884 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:51:59.990900 2100790 machine.go:96] duration metric: took 1.368237553s to provisionDockerMachine
	I0916 10:51:59.990910 2100790 client.go:171] duration metric: took 7.482131873s to LocalClient.Create
	I0916 10:51:59.990936 2100790 start.go:167] duration metric: took 7.482195824s to libmachine.API.Create "ha-234759"
	I0916 10:51:59.990949 2100790 start.go:293] postStartSetup for "ha-234759-m02" (driver="docker")
	I0916 10:51:59.990959 2100790 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:51:59.991025 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:51:59.991073 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:52:00.028084 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40602 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:52:00.230114 2100790 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:52:00.263730 2100790 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:52:00.263833 2100790 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:52:00.263861 2100790 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:52:00.263902 2100790 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:52:00.263938 2100790 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:52:00.271489 2100790 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:52:00.272299 2100790 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:52:00.272486 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:52:00.275328 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:52:00.301426 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:52:00.377609 2100790 start.go:296] duration metric: took 386.642629ms for postStartSetup
	I0916 10:52:00.378172 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m02
	I0916 10:52:00.419790 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:52:00.420162 2100790 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:52:00.420229 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:52:00.460549 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40602 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:52:00.569639 2100790 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:52:00.583439 2100790 start.go:128] duration metric: took 8.077829888s to createHost
	I0916 10:52:00.583526 2100790 start.go:83] releasing machines lock for "ha-234759-m02", held for 8.078053304s
	I0916 10:52:00.583641 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m02
	I0916 10:52:00.606976 2100790 out.go:177] * Found network options:
	I0916 10:52:00.611187 2100790 out.go:177]   - NO_PROXY=192.168.49.2
	W0916 10:52:00.613159 2100790 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:52:00.613226 2100790 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:52:00.613309 2100790 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:52:00.613366 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:52:00.613715 2100790 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:52:00.613795 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:52:00.633041 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40602 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:52:00.640527 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40602 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:52:00.727777 2100790 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:52:00.861679 2100790 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:52:00.861766 2100790 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:52:00.891004 2100790 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:52:00.891082 2100790 start.go:495] detecting cgroup driver to use...
	I0916 10:52:00.891123 2100790 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:52:00.891189 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:52:00.904160 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:52:00.915856 2100790 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:52:00.915927 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:52:00.930988 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:52:00.947340 2100790 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:52:01.033056 2100790 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:52:01.124369 2100790 docker.go:233] disabling docker service ...
	I0916 10:52:01.124439 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:52:01.151205 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:52:01.164618 2100790 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:52:01.259473 2100790 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:52:01.349046 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:52:01.361334 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:52:01.382234 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:52:01.393849 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:52:01.405278 2100790 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:52:01.405400 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:52:01.416463 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:52:01.427693 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:52:01.438592 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:52:01.449156 2100790 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:52:01.459129 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:52:01.470471 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:52:01.481928 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:52:01.495717 2100790 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:52:01.505510 2100790 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:52:01.514899 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:01.602912 2100790 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:52:01.744817 2100790 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:52:01.744897 2100790 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:52:01.749031 2100790 start.go:563] Will wait 60s for crictl version
	I0916 10:52:01.749101 2100790 ssh_runner.go:195] Run: which crictl
	I0916 10:52:01.752795 2100790 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:52:01.792221 2100790 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:52:01.792298 2100790 ssh_runner.go:195] Run: containerd --version
	I0916 10:52:01.814556 2100790 ssh_runner.go:195] Run: containerd --version
	I0916 10:52:01.839285 2100790 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:52:01.840833 2100790 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:52:01.842864 2100790 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:52:01.858110 2100790 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:52:01.861691 2100790 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:52:01.872718 2100790 mustload.go:65] Loading cluster: ha-234759
	I0916 10:52:01.872937 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:52:01.873219 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:52:01.889828 2100790 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:52:01.890129 2100790 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759 for IP: 192.168.49.3
	I0916 10:52:01.890138 2100790 certs.go:194] generating shared ca certs ...
	I0916 10:52:01.890152 2100790 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:01.890272 2100790 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:52:01.890308 2100790 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:52:01.890315 2100790 certs.go:256] generating profile certs ...
	I0916 10:52:01.890393 2100790 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key
	I0916 10:52:01.890420 2100790 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.991f748e
	I0916 10:52:01.890433 2100790 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.991f748e with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.3 192.168.49.254]
	I0916 10:52:02.819883 2100790 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.991f748e ...
	I0916 10:52:02.819920 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.991f748e: {Name:mk59c37e23909c525e021174c1d94dbb826982fa Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:02.820129 2100790 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.991f748e ...
	I0916 10:52:02.820145 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.991f748e: {Name:mkeca968d2ea721cee90ea9eb97a24b334102416 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:02.820235 2100790 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.991f748e -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt
	I0916 10:52:02.820375 2100790 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.991f748e -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key
	I0916 10:52:02.820519 2100790 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key
	I0916 10:52:02.820538 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:52:02.820554 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:52:02.820570 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:52:02.820587 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:52:02.820603 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:52:02.820618 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:52:02.820629 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:52:02.820643 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:52:02.820695 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:52:02.820728 2100790 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:52:02.820740 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:52:02.820764 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:52:02.820794 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:52:02.820821 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:52:02.820866 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:52:02.820899 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:52:02.820934 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:52:02.820950 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:02.821015 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:52:02.840635 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:52:02.951102 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 10:52:02.955103 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 10:52:02.968406 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 10:52:02.973140 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1679 bytes)
	I0916 10:52:02.986161 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 10:52:02.989843 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 10:52:03.004994 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 10:52:03.011769 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1679 bytes)
	I0916 10:52:03.026584 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 10:52:03.030915 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 10:52:03.045806 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 10:52:03.049980 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1675 bytes)
	I0916 10:52:03.064058 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:52:03.091835 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:52:03.120876 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:52:03.150236 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:52:03.178864 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1436 bytes)
	I0916 10:52:03.205094 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:52:03.231522 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:52:03.257524 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 10:52:03.283880 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:52:03.310585 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:52:03.336527 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:52:03.361752 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 10:52:03.381652 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1679 bytes)
	I0916 10:52:03.400412 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 10:52:03.425598 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1679 bytes)
	I0916 10:52:03.450347 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 10:52:03.477087 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1675 bytes)
	I0916 10:52:03.501921 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0916 10:52:03.526309 2100790 ssh_runner.go:195] Run: openssl version
	I0916 10:52:03.533275 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:52:03.545737 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:03.550223 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:03.550366 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:03.558081 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:52:03.572153 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:52:03.591282 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:52:03.595886 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:52:03.595959 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:52:03.604395 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:52:03.616910 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:52:03.630330 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:52:03.634799 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:52:03.634880 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:52:03.650545 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:52:03.660697 2100790 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:52:03.665539 2100790 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:52:03.665590 2100790 kubeadm.go:934] updating node {m02 192.168.49.3 8443 v1.31.1 containerd true true} ...
	I0916 10:52:03.665681 2100790 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-234759-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:52:03.665709 2100790 kube-vip.go:115] generating kube-vip config ...
	I0916 10:52:03.665757 2100790 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:52:03.682968 2100790 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:52:03.683042 2100790 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
	I0916 10:52:03.683106 2100790 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:52:03.694685 2100790 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:52:03.694760 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 10:52:03.708316 2100790 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:52:03.731363 2100790 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:52:03.752346 2100790 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:52:03.773613 2100790 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:52:03.777406 2100790 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:52:03.789030 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:03.883425 2100790 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:52:03.906645 2100790 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:52:03.907112 2100790 start.go:317] joinCluster: &{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerN
ames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2
000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:52:03.907240 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0916 10:52:03.907325 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:52:03.927463 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:52:04.101106 2100790 start.go:343] trying to join control-plane node "m02" to cluster: &{Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:52:04.101156 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token me3nxf.6d1pg723ya0vtuep --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=ha-234759-m02 --control-plane --apiserver-advertise-address=192.168.49.3 --apiserver-bind-port=8443"
	I0916 10:52:13.481760 2100790 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token me3nxf.6d1pg723ya0vtuep --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=ha-234759-m02 --control-plane --apiserver-advertise-address=192.168.49.3 --apiserver-bind-port=8443": (9.380581228s)
	I0916 10:52:13.481795 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0916 10:52:13.953011 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-234759-m02 minikube.k8s.io/updated_at=2024_09_16T10_52_13_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=ha-234759 minikube.k8s.io/primary=false
	I0916 10:52:14.131350 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig taint nodes ha-234759-m02 node-role.kubernetes.io/control-plane:NoSchedule-
	I0916 10:52:14.297580 2100790 start.go:319] duration metric: took 10.390468491s to joinCluster
	I0916 10:52:14.297636 2100790 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:52:14.298016 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:52:14.300678 2100790 out.go:177] * Verifying Kubernetes components...
	I0916 10:52:14.302524 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:14.540418 2100790 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:52:14.560947 2100790 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:52:14.561209 2100790 kapi.go:59] client config for ha-234759: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]strin
g(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:52:14.561286 2100790 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:52:14.561506 2100790 node_ready.go:35] waiting up to 6m0s for node "ha-234759-m02" to be "Ready" ...
	I0916 10:52:14.561587 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:14.561593 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.561602 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.561606 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.584850 2100790 round_trippers.go:574] Response Status: 200 OK in 23 milliseconds
	I0916 10:52:14.586211 2100790 node_ready.go:49] node "ha-234759-m02" has status "Ready":"True"
	I0916 10:52:14.586235 2100790 node_ready.go:38] duration metric: took 24.71438ms for node "ha-234759-m02" to be "Ready" ...
	I0916 10:52:14.586246 2100790 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:52:14.586340 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:52:14.586348 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.586356 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.586360 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.592639 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:52:14.601938 2100790 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.602139 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:52:14.602166 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.602187 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.602205 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.611702 2100790 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:52:14.613067 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:14.613128 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.613152 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.613171 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.617455 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:14.618049 2100790 pod_ready.go:93] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:14.618103 2100790 pod_ready.go:82] duration metric: took 16.059189ms for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.618132 2100790 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.618226 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vqj8q
	I0916 10:52:14.618250 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.618270 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.618289 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.624350 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:52:14.625435 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:14.625491 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.625518 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.625535 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.630962 2100790 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:52:14.631768 2100790 pod_ready.go:93] pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:14.631819 2100790 pod_ready.go:82] duration metric: took 13.666606ms for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.631850 2100790 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.631951 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759
	I0916 10:52:14.631977 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.631999 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.632017 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.660011 2100790 round_trippers.go:574] Response Status: 200 OK in 27 milliseconds
	I0916 10:52:14.661121 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:14.661188 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.661209 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.661227 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.665606 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:14.666668 2100790 pod_ready.go:93] pod "etcd-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:14.666737 2100790 pod_ready.go:82] duration metric: took 34.865874ms for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.666763 2100790 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:14.666864 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:14.666889 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.666909 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.666923 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.677483 2100790 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:52:14.678623 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:14.678709 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:14.678735 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:14.678753 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:14.694229 2100790 round_trippers.go:574] Response Status: 200 OK in 15 milliseconds
	I0916 10:52:15.167848 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:15.167935 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:15.167958 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:15.167979 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:15.171052 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:15.172278 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:15.172346 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:15.172370 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:15.172389 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:15.179672 2100790 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:52:15.667582 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:15.667656 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:15.667678 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:15.667697 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:15.670893 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:15.671979 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:15.672035 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:15.672058 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:15.672073 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:15.674729 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:16.167383 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:16.167461 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:16.167485 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:16.167502 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:16.171444 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:16.172570 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:16.172631 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:16.172654 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:16.172673 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:16.179641 2100790 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:52:16.667381 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:16.667451 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:16.667474 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:16.667495 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:16.670302 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:16.671585 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:16.671648 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:16.671671 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:16.671689 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:16.674182 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:16.675065 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:17.167803 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:17.167878 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:17.167900 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:17.167917 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:17.170713 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:17.171795 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:17.171854 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:17.171878 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:17.171897 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:17.175219 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:17.667820 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:17.667900 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:17.667922 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:17.667942 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:17.670692 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:17.671566 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:17.671630 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:17.671652 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:17.671684 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:17.673987 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:18.167956 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:18.167981 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:18.167991 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:18.167997 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:18.171129 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:18.171904 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:18.171925 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:18.171935 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:18.171941 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:18.174507 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:18.667557 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:18.667635 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:18.667658 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:18.667676 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:18.670277 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:18.670953 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:18.670967 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:18.670976 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:18.670981 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:18.673323 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:19.166997 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:19.167025 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:19.167034 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:19.167039 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:19.169857 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:19.170558 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:19.170579 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:19.170589 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:19.170595 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:19.172990 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:19.173467 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:19.666983 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:19.667009 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:19.667018 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:19.667023 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:19.669930 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:19.670553 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:19.670574 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:19.670583 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:19.670587 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:19.672919 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:20.168223 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:20.168254 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:20.168264 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:20.168271 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:20.171922 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:20.172981 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:20.173007 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:20.173017 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:20.173051 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:20.175974 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:20.667867 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:20.667891 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:20.667900 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:20.667904 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:20.670793 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:20.671604 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:20.671620 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:20.671630 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:20.671636 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:20.673952 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:21.166989 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:21.167014 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:21.167024 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:21.167031 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:21.169947 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:21.170947 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:21.170969 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:21.170979 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:21.170985 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:21.174293 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:21.174844 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:21.667832 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:21.667863 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:21.667873 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:21.667878 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:21.671093 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:21.671706 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:21.671729 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:21.671739 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:21.671746 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:21.674363 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:22.167512 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:22.167538 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:22.167548 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:22.167551 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:22.171401 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:22.172139 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:22.172163 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:22.172172 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:22.172176 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:22.179843 2100790 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:52:22.667683 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:22.667707 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:22.667717 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:22.667722 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:22.670670 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:22.671547 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:22.671568 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:22.671579 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:22.671584 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:22.674452 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:23.167992 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:23.168017 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:23.168025 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:23.168029 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:23.170975 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:23.172096 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:23.172118 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:23.172129 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:23.172134 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:23.174972 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:23.175476 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:23.667184 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:23.667209 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:23.667220 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:23.667225 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:23.670019 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:23.671058 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:23.671081 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:23.671090 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:23.671096 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:23.673575 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:24.167919 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:24.167946 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:24.167956 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:24.167961 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:24.170964 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:24.171998 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:24.172026 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:24.172036 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:24.172042 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:24.174538 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:24.667100 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:24.667131 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:24.667144 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:24.667149 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:24.669964 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:24.670872 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:24.670899 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:24.670909 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:24.670914 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:24.673553 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:25.167699 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:25.167724 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:25.167735 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:25.167741 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:25.170797 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:25.171606 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:25.171624 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:25.171635 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:25.171640 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:25.174513 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:25.667777 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:25.667799 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:25.667809 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:25.667813 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:25.670520 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:25.671210 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:25.671229 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:25.671238 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:25.671242 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:25.673671 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:25.674216 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:26.167934 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:26.167962 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:26.167972 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:26.167977 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:26.171870 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:26.172591 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:26.172614 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:26.172624 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:26.172629 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:26.175357 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:26.667604 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:26.667639 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:26.667724 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:26.667733 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:26.670968 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:26.672105 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:26.672127 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:26.672139 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:26.672143 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:26.675223 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:27.167599 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:27.167690 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:27.167709 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:27.167714 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:27.170579 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:27.171549 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:27.171576 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:27.171586 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:27.171592 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:27.175255 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:27.667053 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:27.667076 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:27.667085 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:27.667089 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:27.669935 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:27.670935 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:27.670952 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:27.670961 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:27.670966 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:27.673543 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:27.674283 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:28.167668 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:28.167695 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:28.167705 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:28.167710 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:28.171000 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:28.172051 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:28.172076 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:28.172088 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:28.172099 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:28.174803 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:28.667481 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:28.667503 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:28.667512 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:28.667516 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:28.670563 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:28.671256 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:28.671275 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:28.671284 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:28.671289 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:28.673782 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:29.167691 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:29.167715 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:29.167725 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:29.167730 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:29.170715 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:29.171714 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:29.171731 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:29.171740 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:29.171744 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:29.174250 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:29.667017 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:29.667065 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:29.667075 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:29.667080 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:29.669982 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:29.670811 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:29.670829 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:29.670842 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:29.670846 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:29.673571 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.167798 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:30.167829 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.167840 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.167845 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.171429 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:30.172187 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:30.172209 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.172231 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.172236 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.175751 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:30.176458 2100790 pod_ready.go:103] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"False"
	I0916 10:52:30.667785 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:52:30.667809 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.667818 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.667824 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.670772 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.671464 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:30.671487 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.671497 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.671503 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.674188 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.674744 2100790 pod_ready.go:93] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:30.674764 2100790 pod_ready.go:82] duration metric: took 16.007974361s for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.674781 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.674846 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759
	I0916 10:52:30.674857 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.674865 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.674869 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.677445 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.678126 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:30.678146 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.678157 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.678165 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.680721 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.681352 2100790 pod_ready.go:93] pod "kube-apiserver-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:30.681374 2100790 pod_ready.go:82] duration metric: took 6.583737ms for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.681387 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.681496 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:52:30.681507 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.681515 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.681520 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.684263 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.685052 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:30.685069 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.685079 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.685083 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.687823 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.688563 2100790 pod_ready.go:93] pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:30.688597 2100790 pod_ready.go:82] duration metric: took 7.201514ms for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.688615 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.688706 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:52:30.688716 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.688724 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.688728 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.691668 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.692464 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:30.692482 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.692492 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.692497 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.695275 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.695848 2100790 pod_ready.go:93] pod "kube-controller-manager-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:30.695867 2100790 pod_ready.go:82] duration metric: took 7.234695ms for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.695879 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:30.695941 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:52:30.695950 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.695959 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.695965 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.698602 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:30.699792 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:30.699814 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:30.699824 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:30.699829 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:30.702393 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:31.196840 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:52:31.196873 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:31.196883 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.196887 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.199973 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:31.200902 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:31.200924 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:31.200935 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.200939 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.203645 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:31.204177 2100790 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:31.204198 2100790 pod_ready.go:82] duration metric: took 508.31135ms for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:31.204210 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:31.268182 2100790 request.go:632] Waited for 63.903624ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:52:31.268295 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:52:31.268308 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:31.268317 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.268321 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.271468 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:31.468456 2100790 request.go:632] Waited for 195.984535ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:31.468530 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:31.468567 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:31.468581 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.468586 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.471707 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:31.472503 2100790 pod_ready.go:93] pod "kube-proxy-f4jm2" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:31.472525 2100790 pod_ready.go:82] duration metric: took 268.307873ms for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:31.472537 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:31.667967 2100790 request.go:632] Waited for 195.308675ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:52:31.668056 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:52:31.668073 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:31.668083 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.668091 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.671256 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:31.868481 2100790 request.go:632] Waited for 196.381744ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:31.868541 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:31.868546 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:31.868556 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:31.868562 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:31.871496 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:31.872041 2100790 pod_ready.go:93] pod "kube-proxy-gwdl4" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:31.872065 2100790 pod_ready.go:82] duration metric: took 399.491207ms for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:31.872077 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:32.068578 2100790 request.go:632] Waited for 196.397932ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:52:32.068661 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:52:32.068674 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:32.068682 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.068687 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.072060 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:32.267941 2100790 request.go:632] Waited for 195.260306ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:32.268053 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:52:32.268063 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:32.268080 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.268084 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.271031 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:32.271654 2100790 pod_ready.go:93] pod "kube-scheduler-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:32.271675 2100790 pod_ready.go:82] duration metric: took 399.575473ms for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:32.271687 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:32.468750 2100790 request.go:632] Waited for 196.979688ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:52:32.468841 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:52:32.468855 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:32.468865 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.468871 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.471963 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:32.667847 2100790 request.go:632] Waited for 195.248598ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:32.667900 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:52:32.667922 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:32.667934 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.667938 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.670669 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:52:32.671452 2100790 pod_ready.go:93] pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:52:32.671476 2100790 pod_ready.go:82] duration metric: took 399.781806ms for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:52:32.671490 2100790 pod_ready.go:39] duration metric: took 18.085232464s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:52:32.671504 2100790 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:52:32.671578 2100790 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:52:32.682804 2100790 api_server.go:72] duration metric: took 18.385130201s to wait for apiserver process to appear ...
	I0916 10:52:32.682827 2100790 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:52:32.682865 2100790 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:52:32.691984 2100790 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:52:32.692057 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 10:52:32.692067 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:32.692076 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.692083 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.692953 2100790 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 10:52:32.693059 2100790 api_server.go:141] control plane version: v1.31.1
	I0916 10:52:32.693075 2100790 api_server.go:131] duration metric: took 10.240715ms to wait for apiserver health ...
	I0916 10:52:32.693083 2100790 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:52:32.868465 2100790 request.go:632] Waited for 175.304271ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:52:32.868523 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:52:32.868529 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:32.868538 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:32.868546 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:32.873423 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:32.879501 2100790 system_pods.go:59] 17 kube-system pods found
	I0916 10:52:32.879541 2100790 system_pods.go:61] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:52:32.879548 2100790 system_pods.go:61] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:52:32.879553 2100790 system_pods.go:61] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:52:32.879559 2100790 system_pods.go:61] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:52:32.879563 2100790 system_pods.go:61] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:52:32.879568 2100790 system_pods.go:61] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running
	I0916 10:52:32.879572 2100790 system_pods.go:61] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:52:32.879576 2100790 system_pods.go:61] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:52:32.879581 2100790 system_pods.go:61] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:52:32.879593 2100790 system_pods.go:61] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running
	I0916 10:52:32.879599 2100790 system_pods.go:61] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running
	I0916 10:52:32.879607 2100790 system_pods.go:61] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:52:32.879611 2100790 system_pods.go:61] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:52:32.879614 2100790 system_pods.go:61] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:52:32.879618 2100790 system_pods.go:61] "kube-vip-ha-234759" [41a1ec5f-e3ae-4b06-9a3f-f76b54f7d24e] Running
	I0916 10:52:32.879622 2100790 system_pods.go:61] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:52:32.879625 2100790 system_pods.go:61] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:52:32.879633 2100790 system_pods.go:74] duration metric: took 186.543914ms to wait for pod list to return data ...
	I0916 10:52:32.879643 2100790 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:52:33.068049 2100790 request.go:632] Waited for 188.323464ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:52:33.068141 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:52:33.068152 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:33.068162 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:33.068172 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:33.071458 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:33.071767 2100790 default_sa.go:45] found service account: "default"
	I0916 10:52:33.071790 2100790 default_sa.go:55] duration metric: took 192.139596ms for default service account to be created ...
	I0916 10:52:33.071800 2100790 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:52:33.268106 2100790 request.go:632] Waited for 196.218709ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:52:33.268195 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:52:33.268211 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:33.268220 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:33.268226 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:33.272528 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:52:33.285834 2100790 system_pods.go:86] 17 kube-system pods found
	I0916 10:52:33.285880 2100790 system_pods.go:89] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:52:33.285888 2100790 system_pods.go:89] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:52:33.285894 2100790 system_pods.go:89] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:52:33.285899 2100790 system_pods.go:89] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:52:33.285904 2100790 system_pods.go:89] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:52:33.285908 2100790 system_pods.go:89] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running
	I0916 10:52:33.285912 2100790 system_pods.go:89] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:52:33.285917 2100790 system_pods.go:89] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:52:33.285922 2100790 system_pods.go:89] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:52:33.285934 2100790 system_pods.go:89] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running
	I0916 10:52:33.285938 2100790 system_pods.go:89] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running
	I0916 10:52:33.285946 2100790 system_pods.go:89] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:52:33.285950 2100790 system_pods.go:89] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:52:33.285958 2100790 system_pods.go:89] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:52:33.285962 2100790 system_pods.go:89] "kube-vip-ha-234759" [41a1ec5f-e3ae-4b06-9a3f-f76b54f7d24e] Running
	I0916 10:52:33.285966 2100790 system_pods.go:89] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:52:33.285969 2100790 system_pods.go:89] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:52:33.285978 2100790 system_pods.go:126] duration metric: took 214.173279ms to wait for k8s-apps to be running ...
	I0916 10:52:33.285988 2100790 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:52:33.286050 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:52:33.298444 2100790 system_svc.go:56] duration metric: took 12.444057ms WaitForService to wait for kubelet
	I0916 10:52:33.298535 2100790 kubeadm.go:582] duration metric: took 19.000872213s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:52:33.298573 2100790 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:52:33.467815 2100790 request.go:632] Waited for 169.134373ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:52:33.467912 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:52:33.467927 2100790 round_trippers.go:469] Request Headers:
	I0916 10:52:33.467936 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:52:33.467941 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:52:33.471310 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:52:33.472264 2100790 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:52:33.472297 2100790 node_conditions.go:123] node cpu capacity is 2
	I0916 10:52:33.472310 2100790 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:52:33.472315 2100790 node_conditions.go:123] node cpu capacity is 2
	I0916 10:52:33.472321 2100790 node_conditions.go:105] duration metric: took 173.737143ms to run NodePressure ...
	I0916 10:52:33.472334 2100790 start.go:241] waiting for startup goroutines ...
	I0916 10:52:33.472361 2100790 start.go:255] writing updated cluster config ...
	I0916 10:52:33.475639 2100790 out.go:201] 
	I0916 10:52:33.478649 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:52:33.478907 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:52:33.481979 2100790 out.go:177] * Starting "ha-234759-m03" control-plane node in "ha-234759" cluster
	I0916 10:52:33.484445 2100790 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:52:33.486955 2100790 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:52:33.489424 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:52:33.489443 2100790 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:52:33.489515 2100790 cache.go:56] Caching tarball of preloaded images
	I0916 10:52:33.489717 2100790 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:52:33.489746 2100790 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:52:33.489911 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	W0916 10:52:33.509372 2100790 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:52:33.509396 2100790 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:52:33.509476 2100790 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:52:33.509500 2100790 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:52:33.509508 2100790 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:52:33.509516 2100790 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:52:33.509522 2100790 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:52:33.510938 2100790 image.go:273] response: 
	I0916 10:52:33.636459 2100790 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:52:33.636501 2100790 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:52:33.636535 2100790 start.go:360] acquireMachinesLock for ha-234759-m03: {Name:mk5869e6facf3d1797569b1a88c6d42d2b487fed Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:52:33.636661 2100790 start.go:364] duration metric: took 103.466µs to acquireMachinesLock for "ha-234759-m03"
	I0916 10:52:33.636694 2100790 start.go:93] Provisioning new machine with config: &{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m03 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:fals
e kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: Sock
etVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m03 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:52:33.636830 2100790 start.go:125] createHost starting for "m03" (driver="docker")
	I0916 10:52:33.640186 2100790 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 10:52:33.640309 2100790 start.go:159] libmachine.API.Create for "ha-234759" (driver="docker")
	I0916 10:52:33.640345 2100790 client.go:168] LocalClient.Create starting
	I0916 10:52:33.640440 2100790 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 10:52:33.640482 2100790 main.go:141] libmachine: Decoding PEM data...
	I0916 10:52:33.640501 2100790 main.go:141] libmachine: Parsing certificate...
	I0916 10:52:33.640559 2100790 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 10:52:33.640581 2100790 main.go:141] libmachine: Decoding PEM data...
	I0916 10:52:33.640593 2100790 main.go:141] libmachine: Parsing certificate...
	I0916 10:52:33.640856 2100790 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:52:33.657119 2100790 network_create.go:77] Found existing network {name:ha-234759 subnet:0x4002072780 gateway:[0 0 0 0 0 0 0 0 0 0 255 255 192 168 49 1] mtu:1500}
	I0916 10:52:33.657166 2100790 kic.go:121] calculated static IP "192.168.49.4" for the "ha-234759-m03" container
	I0916 10:52:33.657247 2100790 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 10:52:33.675429 2100790 cli_runner.go:164] Run: docker volume create ha-234759-m03 --label name.minikube.sigs.k8s.io=ha-234759-m03 --label created_by.minikube.sigs.k8s.io=true
	I0916 10:52:33.693469 2100790 oci.go:103] Successfully created a docker volume ha-234759-m03
	I0916 10:52:33.693560 2100790 cli_runner.go:164] Run: docker run --rm --name ha-234759-m03-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-234759-m03 --entrypoint /usr/bin/test -v ha-234759-m03:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 10:52:34.347834 2100790 oci.go:107] Successfully prepared a docker volume ha-234759-m03
	I0916 10:52:34.347879 2100790 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:52:34.347900 2100790 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 10:52:34.347981 2100790 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v ha-234759-m03:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 10:52:39.731323 2100790 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v ha-234759-m03:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (5.383287092s)
	I0916 10:52:39.731356 2100790 kic.go:203] duration metric: took 5.383452146s to extract preloaded images to volume ...
	W0916 10:52:39.731499 2100790 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 10:52:39.731619 2100790 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 10:52:39.799291 2100790 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname ha-234759-m03 --name ha-234759-m03 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=ha-234759-m03 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=ha-234759-m03 --network ha-234759 --ip 192.168.49.4 --volume ha-234759-m03:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 10:52:40.207901 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m03 --format={{.State.Running}}
	I0916 10:52:40.227563 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m03 --format={{.State.Status}}
	I0916 10:52:40.249716 2100790 cli_runner.go:164] Run: docker exec ha-234759-m03 stat /var/lib/dpkg/alternatives/iptables
	I0916 10:52:40.334625 2100790 oci.go:144] the created container "ha-234759-m03" has a running status.
	I0916 10:52:40.334657 2100790 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa...
	I0916 10:52:41.936482 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 10:52:41.936533 2100790 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 10:52:41.957360 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m03 --format={{.State.Status}}
	I0916 10:52:41.976652 2100790 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 10:52:41.976677 2100790 kic_runner.go:114] Args: [docker exec --privileged ha-234759-m03 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 10:52:42.046361 2100790 cli_runner.go:164] Run: docker container inspect ha-234759-m03 --format={{.State.Status}}
	I0916 10:52:42.066953 2100790 machine.go:93] provisionDockerMachine start ...
	I0916 10:52:42.067080 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:42.092879 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:42.093191 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40607 <nil> <nil>}
	I0916 10:52:42.093213 2100790 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:52:42.248003 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m03
	
	I0916 10:52:42.248035 2100790 ubuntu.go:169] provisioning hostname "ha-234759-m03"
	I0916 10:52:42.248119 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:42.270492 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:42.270836 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40607 <nil> <nil>}
	I0916 10:52:42.270858 2100790 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-234759-m03 && echo "ha-234759-m03" | sudo tee /etc/hostname
	I0916 10:52:42.440760 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m03
	
	I0916 10:52:42.440849 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:42.459977 2100790 main.go:141] libmachine: Using SSH client type: native
	I0916 10:52:42.460342 2100790 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40607 <nil> <nil>}
	I0916 10:52:42.460368 2100790 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-234759-m03' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-234759-m03/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-234759-m03' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:52:42.598999 2100790 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:52:42.599024 2100790 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:52:42.599041 2100790 ubuntu.go:177] setting up certificates
	I0916 10:52:42.599054 2100790 provision.go:84] configureAuth start
	I0916 10:52:42.599118 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m03
	I0916 10:52:42.621734 2100790 provision.go:143] copyHostCerts
	I0916 10:52:42.621792 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:52:42.621825 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:52:42.621835 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:52:42.621915 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:52:42.621999 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:52:42.622023 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:52:42.622032 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:52:42.622060 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:52:42.622119 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:52:42.622135 2100790 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:52:42.622139 2100790 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:52:42.622167 2100790 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:52:42.622249 2100790 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.ha-234759-m03 san=[127.0.0.1 192.168.49.4 ha-234759-m03 localhost minikube]
	I0916 10:52:43.168638 2100790 provision.go:177] copyRemoteCerts
	I0916 10:52:43.168723 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:52:43.168768 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:43.191605 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40607 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:52:43.292375 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:52:43.292458 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:52:43.320575 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:52:43.320646 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:52:43.352225 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:52:43.352289 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:52:43.383018 2100790 provision.go:87] duration metric: took 783.94891ms to configureAuth
	I0916 10:52:43.383048 2100790 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:52:43.383295 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:52:43.383310 2100790 machine.go:96] duration metric: took 1.316334103s to provisionDockerMachine
	I0916 10:52:43.383316 2100790 client.go:171] duration metric: took 9.742962387s to LocalClient.Create
	I0916 10:52:43.383336 2100790 start.go:167] duration metric: took 9.743028275s to libmachine.API.Create "ha-234759"
	I0916 10:52:43.383347 2100790 start.go:293] postStartSetup for "ha-234759-m03" (driver="docker")
	I0916 10:52:43.383356 2100790 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:52:43.383412 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:52:43.383459 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:43.402806 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40607 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:52:43.501362 2100790 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:52:43.505154 2100790 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:52:43.505192 2100790 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:52:43.505204 2100790 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:52:43.505212 2100790 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:52:43.505223 2100790 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:52:43.505291 2100790 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:52:43.505373 2100790 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:52:43.505385 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:52:43.505491 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:52:43.514787 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:52:43.544031 2100790 start.go:296] duration metric: took 160.669508ms for postStartSetup
	I0916 10:52:43.544507 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m03
	I0916 10:52:43.563659 2100790 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:52:43.563988 2100790 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:52:43.564040 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:43.589880 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40607 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:52:43.696567 2100790 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:52:43.703829 2100790 start.go:128] duration metric: took 10.066981633s to createHost
	I0916 10:52:43.703851 2100790 start.go:83] releasing machines lock for "ha-234759-m03", held for 10.067174879s
	I0916 10:52:43.703941 2100790 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m03
	I0916 10:52:43.728755 2100790 out.go:177] * Found network options:
	I0916 10:52:43.730277 2100790 out.go:177]   - NO_PROXY=192.168.49.2,192.168.49.3
	W0916 10:52:43.732042 2100790 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:52:43.732070 2100790 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:52:43.732093 2100790 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:52:43.732109 2100790 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:52:43.732178 2100790 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:52:43.732226 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:43.732508 2100790 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:52:43.732560 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:52:43.755278 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40607 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:52:43.756863 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40607 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:52:43.985074 2100790 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:52:44.025512 2100790 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:52:44.025599 2100790 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:52:44.059947 2100790 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 10:52:44.059975 2100790 start.go:495] detecting cgroup driver to use...
	I0916 10:52:44.060012 2100790 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:52:44.060066 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:52:44.073405 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:52:44.085704 2100790 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:52:44.085771 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:52:44.100755 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:52:44.122230 2100790 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:52:44.217546 2100790 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:52:44.314829 2100790 docker.go:233] disabling docker service ...
	I0916 10:52:44.314906 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:52:44.340640 2100790 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:52:44.354834 2100790 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:52:44.449190 2100790 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:52:44.570654 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:52:44.587618 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:52:44.609284 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:52:44.622641 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:52:44.635130 2100790 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:52:44.635207 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:52:44.647258 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:52:44.658735 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:52:44.669861 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:52:44.681123 2100790 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:52:44.691962 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:52:44.702270 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:52:44.713190 2100790 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:52:44.724009 2100790 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:52:44.732828 2100790 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:52:44.741682 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:44.828595 2100790 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:52:44.971335 2100790 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:52:44.971419 2100790 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:52:44.977263 2100790 start.go:563] Will wait 60s for crictl version
	I0916 10:52:44.977335 2100790 ssh_runner.go:195] Run: which crictl
	I0916 10:52:44.981436 2100790 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:52:45.071091 2100790 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:52:45.071255 2100790 ssh_runner.go:195] Run: containerd --version
	I0916 10:52:45.118340 2100790 ssh_runner.go:195] Run: containerd --version
	I0916 10:52:45.204671 2100790 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:52:45.206516 2100790 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:52:45.208432 2100790 out.go:177]   - env NO_PROXY=192.168.49.2,192.168.49.3
	I0916 10:52:45.210455 2100790 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:52:45.233293 2100790 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:52:45.238776 2100790 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:52:45.256824 2100790 mustload.go:65] Loading cluster: ha-234759
	I0916 10:52:45.257099 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:52:45.257390 2100790 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:52:45.280225 2100790 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:52:45.280623 2100790 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759 for IP: 192.168.49.4
	I0916 10:52:45.280642 2100790 certs.go:194] generating shared ca certs ...
	I0916 10:52:45.280658 2100790 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:45.280866 2100790 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:52:45.280923 2100790 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:52:45.280933 2100790 certs.go:256] generating profile certs ...
	I0916 10:52:45.281028 2100790 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key
	I0916 10:52:45.281065 2100790 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.20d6ef76
	I0916 10:52:45.281084 2100790 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.20d6ef76 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.3 192.168.49.4 192.168.49.254]
	I0916 10:52:45.923599 2100790 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.20d6ef76 ...
	I0916 10:52:45.923631 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.20d6ef76: {Name:mk3222a7a83ee320a551ff049bf10a6fbc2613f9 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:45.923840 2100790 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.20d6ef76 ...
	I0916 10:52:45.923855 2100790 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.20d6ef76: {Name:mk0605f9dac8410eefcf89ab8e4a9d0e499298f0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:52:45.923954 2100790 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.20d6ef76 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt
	I0916 10:52:45.924110 2100790 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.20d6ef76 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key
	I0916 10:52:45.924247 2100790 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key
	I0916 10:52:45.924268 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:52:45.924284 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:52:45.924299 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:52:45.924312 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:52:45.924328 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:52:45.924342 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:52:45.924352 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:52:45.924367 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:52:45.924420 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:52:45.924453 2100790 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:52:45.924463 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:52:45.924492 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:52:45.924538 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:52:45.924560 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:52:45.924605 2100790 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:52:45.924638 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:45.924653 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:52:45.924669 2100790 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:52:45.924738 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:52:45.943668 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:52:46.039025 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 10:52:46.042740 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 10:52:46.055007 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 10:52:46.060129 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1679 bytes)
	I0916 10:52:46.075570 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 10:52:46.078959 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 10:52:46.091670 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 10:52:46.095768 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1679 bytes)
	I0916 10:52:46.108355 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 10:52:46.111625 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 10:52:46.124092 2100790 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 10:52:46.127582 2100790 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1675 bytes)
	I0916 10:52:46.139778 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:52:46.165776 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:52:46.190671 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:52:46.215910 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:52:46.241695 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1444 bytes)
	I0916 10:52:46.269182 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 10:52:46.294302 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:52:46.320006 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 10:52:46.352325 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:52:46.379052 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:52:46.408296 2100790 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:52:46.435056 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 10:52:46.453397 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1679 bytes)
	I0916 10:52:46.472282 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 10:52:46.496774 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1679 bytes)
	I0916 10:52:46.517017 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 10:52:46.537412 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1675 bytes)
	I0916 10:52:46.563962 2100790 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0916 10:52:46.593089 2100790 ssh_runner.go:195] Run: openssl version
	I0916 10:52:46.600293 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:52:46.610760 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:46.618863 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:46.618985 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:52:46.631345 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:52:46.644183 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:52:46.654488 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:52:46.658504 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:52:46.658577 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:52:46.665792 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:52:46.677776 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:52:46.692688 2100790 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:52:46.696966 2100790 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:52:46.697043 2100790 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:52:46.705113 2100790 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:52:46.714753 2100790 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:52:46.718120 2100790 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:52:46.718173 2100790 kubeadm.go:934] updating node {m03 192.168.49.4 8443 v1.31.1 containerd true true} ...
	I0916 10:52:46.718262 2100790 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-234759-m03 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.4
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:52:46.718305 2100790 kube-vip.go:115] generating kube-vip config ...
	I0916 10:52:46.718357 2100790 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:52:46.731814 2100790 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:52:46.731903 2100790 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
	I0916 10:52:46.731987 2100790 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:52:46.741559 2100790 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:52:46.741630 2100790 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 10:52:46.751422 2100790 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:52:46.770950 2100790 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:52:46.793954 2100790 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:52:46.818114 2100790 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:52:46.822385 2100790 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:52:46.835374 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:46.936591 2100790 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:52:46.953510 2100790 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:52:46.953846 2100790 start.go:317] joinCluster: &{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerN
ames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:f
alse kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClien
tPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:52:46.954013 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0916 10:52:46.954121 2100790 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:52:46.975492 2100790 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:52:47.152327 2100790 start.go:343] trying to join control-plane node "m03" to cluster: &{Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:52:47.152388 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token 6gxztb.uf8jk7w93bf819wg --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=ha-234759-m03 --control-plane --apiserver-advertise-address=192.168.49.4 --apiserver-bind-port=8443"
	I0916 10:52:58.761983 2100790 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token 6gxztb.uf8jk7w93bf819wg --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=ha-234759-m03 --control-plane --apiserver-advertise-address=192.168.49.4 --apiserver-bind-port=8443": (11.609570413s)
	I0916 10:52:58.762012 2100790 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0916 10:52:59.293778 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-234759-m03 minikube.k8s.io/updated_at=2024_09_16T10_52_59_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=ha-234759 minikube.k8s.io/primary=false
	I0916 10:52:59.507711 2100790 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig taint nodes ha-234759-m03 node-role.kubernetes.io/control-plane:NoSchedule-
	I0916 10:52:59.744320 2100790 start.go:319] duration metric: took 12.790468782s to joinCluster
	I0916 10:52:59.744379 2100790 start.go:235] Will wait 6m0s for node &{Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:52:59.744786 2100790 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:52:59.747586 2100790 out.go:177] * Verifying Kubernetes components...
	I0916 10:52:59.750545 2100790 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:52:59.987246 2100790 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:53:00.001755 2100790 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:53:00.002039 2100790 kapi.go:59] client config for ha-234759: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]strin
g(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:53:00.002115 2100790 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:53:00.002350 2100790 node_ready.go:35] waiting up to 6m0s for node "ha-234759-m03" to be "Ready" ...
	I0916 10:53:00.002445 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:00.002451 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.002460 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.002465 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.006214 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:00.007796 2100790 node_ready.go:49] node "ha-234759-m03" has status "Ready":"True"
	I0916 10:53:00.007824 2100790 node_ready.go:38] duration metric: took 5.441976ms for node "ha-234759-m03" to be "Ready" ...
	I0916 10:53:00.007835 2100790 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:53:00.007918 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:00.007925 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.007935 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.007938 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.017585 2100790 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:53:00.030856 2100790 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.031094 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:53:00.031138 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.031169 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.031189 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.035615 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:00.037228 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:00.037251 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.037262 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.037266 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.043707 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:53:00.044954 2100790 pod_ready.go:93] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:00.044979 2100790 pod_ready.go:82] duration metric: took 14.025438ms for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.044993 2100790 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.045075 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vqj8q
	I0916 10:53:00.045080 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.045089 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.045093 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.049854 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:00.051531 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:00.051612 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.051636 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.051654 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.055149 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:00.056419 2100790 pod_ready.go:93] pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:00.056503 2100790 pod_ready.go:82] duration metric: took 11.50067ms for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.056536 2100790 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.056669 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759
	I0916 10:53:00.056696 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.056733 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.056755 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.063099 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:53:00.064699 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:00.064794 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.064817 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.064837 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.069742 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:00.071220 2100790 pod_ready.go:93] pod "etcd-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:00.071309 2100790 pod_ready.go:82] duration metric: took 14.733413ms for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.071337 2100790 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.071483 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:53:00.071516 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.071552 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.071577 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.076656 2100790 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:53:00.078231 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:00.078320 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.078351 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.078370 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.083830 2100790 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:53:00.085105 2100790 pod_ready.go:93] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:00.085188 2100790 pod_ready.go:82] duration metric: took 13.826145ms for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.085223 2100790 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:00.202539 2100790 request.go:632] Waited for 117.151865ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:00.202703 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:00.202733 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.202756 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.202790 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.214865 2100790 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 10:53:00.414279 2100790 request.go:632] Waited for 198.21169ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:00.414357 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:00.414364 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.414374 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.414379 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.425661 2100790 round_trippers.go:574] Response Status: 200 OK in 11 milliseconds
	I0916 10:53:00.603392 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:00.603419 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.603430 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.603435 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.606262 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:00.802739 2100790 request.go:632] Waited for 195.263039ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:00.802808 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:00.802817 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:00.802826 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:00.802835 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:00.805423 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:01.085837 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:01.085859 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:01.085871 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:01.085877 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:01.096552 2100790 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:53:01.202801 2100790 request.go:632] Waited for 105.299779ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:01.202869 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:01.202878 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:01.202891 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:01.202901 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:01.210211 2100790 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:53:01.586476 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:01.586502 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:01.586512 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:01.586516 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:01.589794 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:01.602999 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:01.603032 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:01.603044 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:01.603048 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:01.606162 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.086384 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:02.086410 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:02.086421 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.086426 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.089730 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:02.090543 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:02.090564 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:02.090574 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.090578 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.093286 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:02.094011 2100790 pod_ready.go:103] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:53:02.586374 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:02.586398 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:02.586407 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.586428 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.591075 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:02.592523 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:02.592553 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:02.592571 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:02.592582 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:02.595636 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:03.086220 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:03.086252 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:03.086263 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.086269 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.089356 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:03.090129 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:03.090151 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:03.090161 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.090168 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.092945 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:03.585684 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:03.585705 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:03.585715 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.585718 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.588695 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:03.589311 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:03.589331 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:03.589341 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:03.589345 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:03.592384 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:04.085500 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:04.085524 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:04.085535 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.085539 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.089160 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:04.089862 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:04.089882 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:04.089893 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.089899 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.092731 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:04.586190 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:04.586210 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:04.586220 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.586225 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.589572 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:04.590394 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:04.590409 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:04.590417 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:04.590421 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:04.593236 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:04.593773 2100790 pod_ready.go:103] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:53:05.085487 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:05.085516 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:05.085527 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.085531 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.088925 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:05.089832 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:05.089853 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:05.089863 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.089869 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.092833 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:05.586027 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:05.586050 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:05.586059 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.586063 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.590347 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:05.591941 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:05.591969 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:05.591978 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:05.591985 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:05.597384 2100790 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:53:06.086344 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:06.086369 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:06.086380 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:06.086385 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:06.089638 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:06.090417 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:06.090435 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:06.090446 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:06.090451 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:06.093217 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:06.585504 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:06.585526 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:06.585536 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:06.585540 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:06.588458 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:06.589734 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:06.589753 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:06.589763 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:06.589782 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:06.592575 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:07.085516 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:07.085539 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:07.085549 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:07.085554 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:07.088961 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:07.089950 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:07.089971 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:07.089981 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:07.089987 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:07.092680 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:07.093401 2100790 pod_ready.go:103] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:53:07.585556 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:07.585581 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:07.585591 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:07.585596 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:07.589086 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:07.590010 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:07.590031 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:07.590041 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:07.590046 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:07.592854 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:08.085522 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:08.085545 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:08.085558 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:08.085563 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:08.088913 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:08.089798 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:08.089819 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:08.089829 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:08.089833 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:08.092773 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:08.585479 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:08.585502 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:08.585512 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:08.585517 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:08.588565 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:08.589189 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:08.589198 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:08.589207 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:08.589211 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:08.591841 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:09.085999 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:09.086020 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:09.086029 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:09.086034 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:09.089276 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:09.089982 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:09.089994 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:09.090003 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:09.090011 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:09.093611 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:09.094291 2100790 pod_ready.go:103] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:53:09.585630 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:09.585651 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:09.585661 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:09.585667 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:09.588738 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:09.589386 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:09.589397 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:09.589407 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:09.589412 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:09.592087 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:10.085952 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:10.085975 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:10.085985 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:10.085990 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:10.090002 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:10.091096 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:10.091125 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:10.091135 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:10.091142 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:10.094853 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:10.586045 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:10.586074 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:10.586082 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:10.586086 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:10.588861 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:10.589484 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:10.589493 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:10.589503 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:10.589508 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:10.591887 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:11.085588 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:11.085613 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:11.085623 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:11.085628 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:11.090142 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:11.092123 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:11.092144 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:11.092161 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:11.092167 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:11.107741 2100790 round_trippers.go:574] Response Status: 200 OK in 15 milliseconds
	I0916 10:53:11.108712 2100790 pod_ready.go:103] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:53:11.585937 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:11.585970 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:11.585981 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:11.585985 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:11.588955 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:11.589579 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:11.589589 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:11.589598 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:11.589602 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:11.592293 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:12.086017 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:12.086044 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:12.086055 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:12.086061 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:12.089340 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:12.090125 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:12.090144 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:12.090155 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:12.090159 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:12.093762 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:12.585520 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:12.585544 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:12.585554 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:12.585558 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:12.588569 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:12.589630 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:12.589649 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:12.589659 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:12.589666 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:12.592720 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:13.085472 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:13.085493 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:13.085503 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:13.085507 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:13.088576 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:13.089559 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:13.089574 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:13.089587 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:13.089592 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:13.092798 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:13.586411 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:13.586433 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:13.586443 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:13.586447 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:13.589409 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:13.590052 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:13.590076 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:13.590087 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:13.590092 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:13.593120 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:13.593616 2100790 pod_ready.go:103] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"False"
	I0916 10:53:14.085905 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:14.085929 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:14.085939 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:14.085945 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:14.090810 2100790 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:53:14.091934 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:14.091954 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:14.091964 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:14.091970 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:14.094918 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:14.586029 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:14.586050 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:14.586060 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:14.586072 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:14.589336 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:14.590151 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:14.590169 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:14.590179 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:14.590183 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:14.593127 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:15.086009 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:53:15.086033 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.086044 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.086048 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.092809 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:53:15.093797 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:15.093827 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.093838 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.093848 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.116532 2100790 round_trippers.go:574] Response Status: 200 OK in 22 milliseconds
	I0916 10:53:15.117038 2100790 pod_ready.go:93] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.117064 2100790 pod_ready.go:82] duration metric: took 15.031795779s for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.117097 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.117172 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759
	I0916 10:53:15.117181 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.117189 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.117204 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.120370 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.121805 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:15.121829 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.121840 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.121846 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.125356 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.126239 2100790 pod_ready.go:93] pod "kube-apiserver-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.126264 2100790 pod_ready.go:82] duration metric: took 9.154626ms for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.126277 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.126345 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:53:15.126355 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.126363 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.126369 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.130022 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.130930 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:15.130990 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.131015 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.131035 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.134272 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.134952 2100790 pod_ready.go:93] pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.134979 2100790 pod_ready.go:82] duration metric: took 8.694871ms for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.134992 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.135070 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m03
	I0916 10:53:15.135080 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.135097 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.135106 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.138404 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.139433 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:15.139454 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.139464 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.139468 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.142665 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.143525 2100790 pod_ready.go:93] pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.143547 2100790 pod_ready.go:82] duration metric: took 8.548303ms for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.143558 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.143623 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:53:15.143634 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.143642 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.143647 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.146853 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.147936 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:15.147998 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.148021 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.148041 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.151123 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.151930 2100790 pod_ready.go:93] pod "kube-controller-manager-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.152038 2100790 pod_ready.go:82] duration metric: took 8.470979ms for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.152065 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.286488 2100790 request.go:632] Waited for 134.323311ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:53:15.286564 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:53:15.286574 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.286583 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.286592 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.289913 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.486167 2100790 request.go:632] Waited for 195.243315ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:15.486320 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:15.486347 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.486370 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.486388 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.489669 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.490324 2100790 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.490346 2100790 pod_ready.go:82] duration metric: took 338.258699ms for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.490359 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.686344 2100790 request.go:632] Waited for 195.902732ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:53:15.686427 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:53:15.686440 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.686450 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.686455 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.689472 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:15.886562 2100790 request.go:632] Waited for 196.35073ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:15.886651 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:15.886661 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:15.886671 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:15.886747 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:15.889787 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:15.890339 2100790 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:15.890358 2100790 pod_ready.go:82] duration metric: took 399.991383ms for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:15.890370 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:16.086966 2100790 request.go:632] Waited for 196.474939ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:53:16.087068 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:53:16.087079 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:16.087087 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:16.087093 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:16.090186 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:16.286669 2100790 request.go:632] Waited for 195.729737ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:16.286761 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:16.286767 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:16.286777 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:16.286785 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:16.289535 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:16.290048 2100790 pod_ready.go:93] pod "kube-proxy-f4jm2" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:16.290074 2100790 pod_ready.go:82] duration metric: took 399.696985ms for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:16.290098 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:16.486589 2100790 request.go:632] Waited for 196.411858ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:53:16.486668 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:53:16.486753 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:16.486774 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:16.486779 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:16.489854 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:16.686773 2100790 request.go:632] Waited for 196.227408ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:16.686828 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:16.686834 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:16.686844 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:16.686850 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:16.690031 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:16.690559 2100790 pod_ready.go:93] pod "kube-proxy-gwdl4" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:16.690579 2100790 pod_ready.go:82] duration metric: took 400.468172ms for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:16.690590 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:16.886347 2100790 request.go:632] Waited for 195.64899ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:53:16.886437 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:53:16.886449 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:16.886458 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:16.886465 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:16.889625 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:17.086611 2100790 request.go:632] Waited for 196.218998ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:17.086746 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:17.086760 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:17.086770 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:17.086775 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:17.089919 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:17.090750 2100790 pod_ready.go:93] pod "kube-proxy-qrdxc" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:17.090774 2100790 pod_ready.go:82] duration metric: took 400.17672ms for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:17.090786 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:17.286333 2100790 request.go:632] Waited for 195.472589ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:53:17.286426 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:53:17.286438 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:17.286449 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:17.286454 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:17.289368 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:17.486351 2100790 request.go:632] Waited for 196.330841ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:17.486414 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:53:17.486425 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:17.486434 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:17.486440 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:17.490240 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:17.491312 2100790 pod_ready.go:93] pod "kube-scheduler-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:17.491376 2100790 pod_ready.go:82] duration metric: took 400.581132ms for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:17.491403 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:17.686830 2100790 request.go:632] Waited for 195.345518ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:53:17.686978 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:53:17.687004 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:17.687028 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:17.687045 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:17.690176 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:17.886517 2100790 request.go:632] Waited for 195.373644ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:17.886579 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:53:17.886593 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:17.886601 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:17.886605 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:17.889635 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:17.890186 2100790 pod_ready.go:93] pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:17.890206 2100790 pod_ready.go:82] duration metric: took 398.784704ms for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:17.890219 2100790 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:18.086990 2100790 request.go:632] Waited for 196.692874ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:53:18.087049 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:53:18.087056 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:18.087065 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:18.087075 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:18.090267 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:18.286299 2100790 request.go:632] Waited for 195.282207ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:18.286378 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:53:18.286406 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:18.286422 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:18.286427 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:18.289318 2100790 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:53:18.289844 2100790 pod_ready.go:93] pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:53:18.289861 2100790 pod_ready.go:82] duration metric: took 399.626602ms for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:53:18.289873 2100790 pod_ready.go:39] duration metric: took 18.282026231s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:53:18.289893 2100790 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:53:18.289958 2100790 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:53:18.302372 2100790 api_server.go:72] duration metric: took 18.557963761s to wait for apiserver process to appear ...
	I0916 10:53:18.302451 2100790 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:53:18.302482 2100790 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:53:18.310422 2100790 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:53:18.310497 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 10:53:18.310508 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:18.310528 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:18.310533 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:18.311599 2100790 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:53:18.311656 2100790 api_server.go:141] control plane version: v1.31.1
	I0916 10:53:18.311674 2100790 api_server.go:131] duration metric: took 9.20923ms to wait for apiserver health ...
	I0916 10:53:18.311682 2100790 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:53:18.487060 2100790 request.go:632] Waited for 175.31082ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:18.487169 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:18.487184 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:18.487194 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:18.487198 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:18.493988 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:53:18.503268 2100790 system_pods.go:59] 24 kube-system pods found
	I0916 10:53:18.503307 2100790 system_pods.go:61] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:53:18.503315 2100790 system_pods.go:61] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:53:18.503320 2100790 system_pods.go:61] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:53:18.503333 2100790 system_pods.go:61] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:53:18.503342 2100790 system_pods.go:61] "etcd-ha-234759-m03" [701fa3e0-9014-4f92-9734-6b708cb56aa0] Running
	I0916 10:53:18.503346 2100790 system_pods.go:61] "kindnet-jhkc5" [59ea0a34-9a2b-44f2-901f-2bcc13b91a1b] Running
	I0916 10:53:18.503353 2100790 system_pods.go:61] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:53:18.503357 2100790 system_pods.go:61] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running
	I0916 10:53:18.503364 2100790 system_pods.go:61] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:53:18.503368 2100790 system_pods.go:61] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:53:18.503371 2100790 system_pods.go:61] "kube-apiserver-ha-234759-m03" [136cdbc8-ac04-4ca9-85a2-17ff91df20c0] Running
	I0916 10:53:18.503376 2100790 system_pods.go:61] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:53:18.503383 2100790 system_pods.go:61] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running
	I0916 10:53:18.503387 2100790 system_pods.go:61] "kube-controller-manager-ha-234759-m03" [61a9f46c-7889-49f1-afe7-1bbce98f0b5a] Running
	I0916 10:53:18.503391 2100790 system_pods.go:61] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running
	I0916 10:53:18.503402 2100790 system_pods.go:61] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:53:18.503406 2100790 system_pods.go:61] "kube-proxy-qrdxc" [d08c1deb-9b33-4ec1-b15e-6dce465c00d9] Running
	I0916 10:53:18.503409 2100790 system_pods.go:61] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:53:18.503413 2100790 system_pods.go:61] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:53:18.503417 2100790 system_pods.go:61] "kube-scheduler-ha-234759-m03" [deab7491-330f-496f-985f-3b1882eeeb60] Running
	I0916 10:53:18.503423 2100790 system_pods.go:61] "kube-vip-ha-234759" [41a1ec5f-e3ae-4b06-9a3f-f76b54f7d24e] Running
	I0916 10:53:18.503428 2100790 system_pods.go:61] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:53:18.503433 2100790 system_pods.go:61] "kube-vip-ha-234759-m03" [44f3fbf3-8803-4d2f-884c-470cdc81e835] Running
	I0916 10:53:18.503437 2100790 system_pods.go:61] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:53:18.503446 2100790 system_pods.go:74] duration metric: took 191.758004ms to wait for pod list to return data ...
	I0916 10:53:18.503457 2100790 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:53:18.686875 2100790 request.go:632] Waited for 183.327132ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:53:18.686934 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:53:18.686940 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:18.686948 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:18.686955 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:18.690563 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:18.690979 2100790 default_sa.go:45] found service account: "default"
	I0916 10:53:18.691005 2100790 default_sa.go:55] duration metric: took 187.537042ms for default service account to be created ...
	I0916 10:53:18.691015 2100790 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:53:18.886349 2100790 request.go:632] Waited for 195.266535ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:18.886417 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:53:18.886424 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:18.886438 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:18.886447 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:18.892680 2100790 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:53:18.901986 2100790 system_pods.go:86] 24 kube-system pods found
	I0916 10:53:18.903207 2100790 system_pods.go:89] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:53:18.903237 2100790 system_pods.go:89] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:53:18.903268 2100790 system_pods.go:89] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:53:18.903292 2100790 system_pods.go:89] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:53:18.903313 2100790 system_pods.go:89] "etcd-ha-234759-m03" [701fa3e0-9014-4f92-9734-6b708cb56aa0] Running
	I0916 10:53:18.903336 2100790 system_pods.go:89] "kindnet-jhkc5" [59ea0a34-9a2b-44f2-901f-2bcc13b91a1b] Running
	I0916 10:53:18.903354 2100790 system_pods.go:89] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:53:18.903389 2100790 system_pods.go:89] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running
	I0916 10:53:18.903408 2100790 system_pods.go:89] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:53:18.903428 2100790 system_pods.go:89] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:53:18.903446 2100790 system_pods.go:89] "kube-apiserver-ha-234759-m03" [136cdbc8-ac04-4ca9-85a2-17ff91df20c0] Running
	I0916 10:53:18.903474 2100790 system_pods.go:89] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:53:18.903498 2100790 system_pods.go:89] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running
	I0916 10:53:18.903802 2100790 system_pods.go:89] "kube-controller-manager-ha-234759-m03" [61a9f46c-7889-49f1-afe7-1bbce98f0b5a] Running
	I0916 10:53:18.903839 2100790 system_pods.go:89] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running
	I0916 10:53:18.908336 2100790 system_pods.go:89] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:53:18.909417 2100790 system_pods.go:89] "kube-proxy-qrdxc" [d08c1deb-9b33-4ec1-b15e-6dce465c00d9] Running
	I0916 10:53:18.909440 2100790 system_pods.go:89] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:53:18.909459 2100790 system_pods.go:89] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:53:18.909491 2100790 system_pods.go:89] "kube-scheduler-ha-234759-m03" [deab7491-330f-496f-985f-3b1882eeeb60] Running
	I0916 10:53:18.909515 2100790 system_pods.go:89] "kube-vip-ha-234759" [41a1ec5f-e3ae-4b06-9a3f-f76b54f7d24e] Running
	I0916 10:53:18.909534 2100790 system_pods.go:89] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:53:18.909549 2100790 system_pods.go:89] "kube-vip-ha-234759-m03" [44f3fbf3-8803-4d2f-884c-470cdc81e835] Running
	I0916 10:53:18.909567 2100790 system_pods.go:89] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:53:18.909599 2100790 system_pods.go:126] duration metric: took 218.575231ms to wait for k8s-apps to be running ...
	I0916 10:53:18.909628 2100790 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:53:18.909725 2100790 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:53:18.924027 2100790 system_svc.go:56] duration metric: took 14.389539ms WaitForService to wait for kubelet
	I0916 10:53:18.924100 2100790 kubeadm.go:582] duration metric: took 19.179694507s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:53:18.924151 2100790 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:53:19.086539 2100790 request.go:632] Waited for 162.294484ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:53:19.086598 2100790 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:53:19.086603 2100790 round_trippers.go:469] Request Headers:
	I0916 10:53:19.086612 2100790 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:53:19.086621 2100790 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:53:19.090275 2100790 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:53:19.091484 2100790 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:53:19.091512 2100790 node_conditions.go:123] node cpu capacity is 2
	I0916 10:53:19.091523 2100790 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:53:19.091529 2100790 node_conditions.go:123] node cpu capacity is 2
	I0916 10:53:19.091533 2100790 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:53:19.091538 2100790 node_conditions.go:123] node cpu capacity is 2
	I0916 10:53:19.091543 2100790 node_conditions.go:105] duration metric: took 167.371514ms to run NodePressure ...
	I0916 10:53:19.091559 2100790 start.go:241] waiting for startup goroutines ...
	I0916 10:53:19.091586 2100790 start.go:255] writing updated cluster config ...
	I0916 10:53:19.091920 2100790 ssh_runner.go:195] Run: rm -f paused
	I0916 10:53:19.101498 2100790 out.go:177] * Done! kubectl is now configured to use "ha-234759" cluster and "default" namespace by default
	E0916 10:53:19.103908 2100790 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	7e2e09055b617       89a35e2ebb6b9       About a minute ago   Running             busybox                   0                   59f93d3cf6ebc       busybox-7dff88458-kjr9x
	0e47bf675d7df       2f6c962e7b831       3 minutes ago        Running             coredns                   0                   297acf9bc71e4       coredns-7c65d6cfc9-2l4br
	e629c24c41e32       2f6c962e7b831       3 minutes ago        Running             coredns                   0                   88f003522915c       coredns-7c65d6cfc9-vqj8q
	2586d6167e755       ba04bb24b9575       3 minutes ago        Running             storage-provisioner       0                   97c9faf1ef6b9       storage-provisioner
	7d51a8f7f42ff       6a23fa8fd2b78       3 minutes ago        Running             kindnet-cni               0                   ae1a0829d833f       kindnet-q8nl6
	900d2ad5148fe       24a140c548c07       3 minutes ago        Running             kube-proxy                0                   4e5ecfb50c3cd       kube-proxy-gwdl4
	33346b72c3ec8       7e2a4e229620b       3 minutes ago        Running             kube-vip                  0                   0a388c673a7b6       kube-vip-ha-234759
	324a547043689       27e3830e14027       3 minutes ago        Running             etcd                      0                   fa0cb25bfd24a       etcd-ha-234759
	5a7d53b11a05f       7f8aa378bb47d       3 minutes ago        Running             kube-scheduler            0                   56f2eb27f3396       kube-scheduler-ha-234759
	a7002833ce71b       279f381cb3736       3 minutes ago        Running             kube-controller-manager   0                   d4248c2bf66dc       kube-controller-manager-ha-234759
	fd48034050bae       d3f53a98c0a9d       3 minutes ago        Running             kube-apiserver            0                   2d1650dd1ced5       kube-apiserver-ha-234759
	
	
	==> containerd <==
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.455045464Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.455058214Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.455142702Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.600534643Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-7c65d6cfc9-2l4br,Uid:18d893a2-274a-413e-bf3d-0dd1e88a9984,Namespace:kube-system,Attempt:0,} returns sandbox id \"297acf9bc71e47516551a5df82eefb57a4c11f5167fc46f762281843c0061c49\""
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.608009960Z" level=info msg="CreateContainer within sandbox \"297acf9bc71e47516551a5df82eefb57a4c11f5167fc46f762281843c0061c49\" for container &ContainerMetadata{Name:coredns,Attempt:0,}"
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.632002179Z" level=info msg="CreateContainer within sandbox \"297acf9bc71e47516551a5df82eefb57a4c11f5167fc46f762281843c0061c49\" for &ContainerMetadata{Name:coredns,Attempt:0,} returns container id \"0e47bf675d7dfaa397cb12c3a975c0fdae535cdf7989381139d757ac5f8a5eaf\""
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.635432796Z" level=info msg="StartContainer for \"0e47bf675d7dfaa397cb12c3a975c0fdae535cdf7989381139d757ac5f8a5eaf\""
	Sep 16 10:52:04 ha-234759 containerd[821]: time="2024-09-16T10:52:04.766728481Z" level=info msg="StartContainer for \"0e47bf675d7dfaa397cb12c3a975c0fdae535cdf7989381139d757ac5f8a5eaf\" returns successfully"
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.025128980Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:busybox-7dff88458-kjr9x,Uid:18a7f530-b34d-413a-9028-13511f5b9be6,Namespace:default,Attempt:0,}"
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.172814800Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.172963115Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.173004165Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.173141527Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.312044969Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:busybox-7dff88458-kjr9x,Uid:18a7f530-b34d-413a-9028-13511f5b9be6,Namespace:default,Attempt:0,} returns sandbox id \"59f93d3cf6ebc1a2852f89874b510bf9172a820a16c52d9b9ad1b24fcdfc4bea\""
	Sep 16 10:53:21 ha-234759 containerd[821]: time="2024-09-16T10:53:21.317796925Z" level=info msg="PullImage \"gcr.io/k8s-minikube/busybox:1.28\""
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.213571160Z" level=info msg="ImageCreate event name:\"gcr.io/k8s-minikube/busybox:1.28\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.216616589Z" level=info msg="stop pulling image gcr.io/k8s-minikube/busybox:1.28: active requests=0, bytes read=766310"
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.222099534Z" level=info msg="ImageCreate event name:\"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.226269862Z" level=info msg="ImageCreate event name:\"gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.227094678Z" level=info msg="Pulled image \"gcr.io/k8s-minikube/busybox:1.28\" with image id \"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\", repo tag \"gcr.io/k8s-minikube/busybox:1.28\", repo digest \"gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12\", size \"764554\" in 1.909236313s"
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.227135785Z" level=info msg="PullImage \"gcr.io/k8s-minikube/busybox:1.28\" returns image reference \"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\""
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.233718554Z" level=info msg="CreateContainer within sandbox \"59f93d3cf6ebc1a2852f89874b510bf9172a820a16c52d9b9ad1b24fcdfc4bea\" for container &ContainerMetadata{Name:busybox,Attempt:0,}"
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.255235031Z" level=info msg="CreateContainer within sandbox \"59f93d3cf6ebc1a2852f89874b510bf9172a820a16c52d9b9ad1b24fcdfc4bea\" for &ContainerMetadata{Name:busybox,Attempt:0,} returns container id \"7e2e09055b61790f45d02e3260c05daee3be55c8b2fc6527f43462f3b1cc91f3\""
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.256459335Z" level=info msg="StartContainer for \"7e2e09055b61790f45d02e3260c05daee3be55c8b2fc6527f43462f3b1cc91f3\""
	Sep 16 10:53:23 ha-234759 containerd[821]: time="2024-09-16T10:53:23.317952234Z" level=info msg="StartContainer for \"7e2e09055b61790f45d02e3260c05daee3be55c8b2fc6527f43462f3b1cc91f3\" returns successfully"
	
	
	==> coredns [0e47bf675d7dfaa397cb12c3a975c0fdae535cdf7989381139d757ac5f8a5eaf] <==
	[INFO] 10.244.2.2:52667 - 3 "AAAA IN kubernetes.io. udp 31 false 512" NOERROR qr,rd,ra 31 0.001304304s
	[INFO] 10.244.1.2:37083 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000164668s
	[INFO] 10.244.1.2:49488 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.00026353s
	[INFO] 10.244.1.2:56285 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.002384855s
	[INFO] 10.244.1.2:35002 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000170698s
	[INFO] 10.244.1.2:50858 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000131306s
	[INFO] 10.244.0.4:38621 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000076611s
	[INFO] 10.244.0.4:36661 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001075152s
	[INFO] 10.244.0.4:53651 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000141973s
	[INFO] 10.244.2.2:45377 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000162452s
	[INFO] 10.244.2.2:43234 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000100759s
	[INFO] 10.244.1.2:43502 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000106273s
	[INFO] 10.244.0.4:55514 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000173914s
	[INFO] 10.244.0.4:55773 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000884467s
	[INFO] 10.244.0.4:41665 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000052447s
	[INFO] 10.244.2.2:41797 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000094014s
	[INFO] 10.244.2.2:36525 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000069365s
	[INFO] 10.244.2.2:43068 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000061341s
	[INFO] 10.244.1.2:60478 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000124816s
	[INFO] 10.244.1.2:59811 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000107117s
	[INFO] 10.244.0.4:38611 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000104008s
	[INFO] 10.244.0.4:58312 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000183285s
	[INFO] 10.244.0.4:37216 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000090026s
	[INFO] 10.244.2.2:35594 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000105205s
	[INFO] 10.244.2.2:35249 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000187798s
	
	
	==> coredns [e629c24c41e32603dc9a53125aa7122a5a0c58d985e95165cffe89d5670988c4] <==
	[INFO] 10.244.2.2:55324 - 5 "PTR IN 148.40.75.147.in-addr.arpa. udp 44 false 512" NXDOMAIN qr,rd,ra 44 0.001285121s
	[INFO] 10.244.1.2:48295 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.002432526s
	[INFO] 10.244.1.2:55741 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000227814s
	[INFO] 10.244.1.2:36649 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000202985s
	[INFO] 10.244.0.4:56384 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000102695s
	[INFO] 10.244.0.4:40529 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.00123465s
	[INFO] 10.244.0.4:54004 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000137197s
	[INFO] 10.244.0.4:51298 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000069636s
	[INFO] 10.244.0.4:46099 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000151031s
	[INFO] 10.244.2.2:44482 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001418231s
	[INFO] 10.244.2.2:41395 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000067806s
	[INFO] 10.244.2.2:34678 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000130428s
	[INFO] 10.244.2.2:45582 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001555683s
	[INFO] 10.244.2.2:39632 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000067101s
	[INFO] 10.244.2.2:46573 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.00010962s
	[INFO] 10.244.1.2:45854 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000124578s
	[INFO] 10.244.1.2:52505 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000082141s
	[INFO] 10.244.1.2:54504 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000106543s
	[INFO] 10.244.0.4:43966 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000198588s
	[INFO] 10.244.2.2:57482 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000082715s
	[INFO] 10.244.1.2:42996 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000114255s
	[INFO] 10.244.1.2:54974 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.00016132s
	[INFO] 10.244.0.4:36323 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000157373s
	[INFO] 10.244.2.2:48775 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000152442s
	[INFO] 10.244.2.2:50527 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000112869s
	
	
	==> describe nodes <==
	Name:               ha-234759
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_51_47_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:51:45 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:55:10 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:53:49 +0000   Mon, 16 Sep 2024 10:51:45 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:53:49 +0000   Mon, 16 Sep 2024 10:51:45 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:53:49 +0000   Mon, 16 Sep 2024 10:51:45 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:53:49 +0000   Mon, 16 Sep 2024 10:51:46 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    ha-234759
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 9135db83b7fa4a89a9709e47daa481e7
	  System UUID:                2a58ed5f-69e8-4ab8-a10e-2a95cf1d9dec
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (11 in total)
	  Namespace                   Name                                 CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                 ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-kjr9x              0 (0%)        0 (0%)      0 (0%)           0 (0%)         117s
	  kube-system                 coredns-7c65d6cfc9-2l4br             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     3m26s
	  kube-system                 coredns-7c65d6cfc9-vqj8q             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     3m26s
	  kube-system                 etcd-ha-234759                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         3m31s
	  kube-system                 kindnet-q8nl6                        100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m26s
	  kube-system                 kube-apiserver-ha-234759             250m (12%)    0 (0%)      0 (0%)           0 (0%)         3m31s
	  kube-system                 kube-controller-manager-ha-234759    200m (10%)    0 (0%)      0 (0%)           0 (0%)         3m31s
	  kube-system                 kube-proxy-gwdl4                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m26s
	  kube-system                 kube-scheduler-ha-234759             100m (5%)     0 (0%)      0 (0%)           0 (0%)         3m31s
	  kube-system                 kube-vip-ha-234759                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m31s
	  kube-system                 storage-provisioner                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m25s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                950m (47%)  100m (5%)
	  memory             290Mi (3%)  390Mi (4%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age    From             Message
	  ----     ------                   ----   ----             -------
	  Normal   Starting                 3m25s  kube-proxy       
	  Normal   Starting                 3m31s  kubelet          Starting kubelet.
	  Warning  CgroupV1                 3m31s  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  3m31s  kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeReady                3m31s  kubelet          Node ha-234759 status is now: NodeReady
	  Normal   NodeHasSufficientMemory  3m31s  kubelet          Node ha-234759 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m31s  kubelet          Node ha-234759 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m31s  kubelet          Node ha-234759 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           3m27s  node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           2m56s  node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           2m12s  node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           6s     node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	
	
	Name:               ha-234759-m02
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_52_13_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:52:10 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:55:12 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:55:12 +0000   Mon, 16 Sep 2024 10:52:10 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:55:12 +0000   Mon, 16 Sep 2024 10:52:10 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:55:12 +0000   Mon, 16 Sep 2024 10:52:10 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:55:12 +0000   Mon, 16 Sep 2024 10:52:11 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.3
	  Hostname:    ha-234759-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 893567448e9948b887d9887c908e1ca6
	  System UUID:                ee72b9d9-548d-49fb-8dc5-aa6839abad7f
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-7l4g7                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         117s
	  kube-system                 etcd-ha-234759-m02                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         3m5s
	  kube-system                 kindnet-svsnq                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m7s
	  kube-system                 kube-apiserver-ha-234759-m02             250m (12%)    0 (0%)      0 (0%)           0 (0%)         3m6s
	  kube-system                 kube-controller-manager-ha-234759-m02    200m (10%)    0 (0%)      0 (0%)           0 (0%)         3m5s
	  kube-system                 kube-proxy-f4jm2                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m7s
	  kube-system                 kube-scheduler-ha-234759-m02             100m (5%)     0 (0%)      0 (0%)           0 (0%)         3m6s
	  kube-system                 kube-vip-ha-234759-m02                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m2s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (1%)  50Mi (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                  From             Message
	  ----     ------                   ----                 ----             -------
	  Normal   Starting                 2m59s                kube-proxy       
	  Normal   NodeHasSufficientPID     3m7s (x7 over 3m7s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientPID
	  Normal   NodeHasNoDiskPressure    3m7s (x7 over 3m7s)  kubelet          Node ha-234759-m02 status is now: NodeHasNoDiskPressure
	  Warning  CgroupV1                 3m7s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   Starting                 3m7s                 kubelet          Starting kubelet.
	  Normal   NodeAllocatableEnforced  3m7s                 kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  3m7s (x8 over 3m7s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientMemory
	  Normal   RegisteredNode           3m7s                 node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   RegisteredNode           2m56s                node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   RegisteredNode           2m12s                node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   Starting                 18s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 18s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  18s (x8 over 18s)    kubelet          Node ha-234759-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    18s (x7 over 18s)    kubelet          Node ha-234759-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     18s (x7 over 18s)    kubelet          Node ha-234759-m02 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  18s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           6s                   node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	
	
	Name:               ha-234759-m03
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759-m03
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_52_59_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:52:54 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759-m03
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:55:17 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:53:56 +0000   Mon, 16 Sep 2024 10:52:54 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:53:56 +0000   Mon, 16 Sep 2024 10:52:54 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:53:56 +0000   Mon, 16 Sep 2024 10:52:54 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:53:56 +0000   Mon, 16 Sep 2024 10:52:55 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.4
	  Hostname:    ha-234759-m03
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 16dbc83e7476410281e7051b011cace5
	  System UUID:                97682363-4679-4dad-b2b1-8d6fd4a34715
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.2.0/24
	PodCIDRs:                     10.244.2.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-m9lsb                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         117s
	  kube-system                 etcd-ha-234759-m03                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         2m20s
	  kube-system                 kindnet-jhkc5                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      2m23s
	  kube-system                 kube-apiserver-ha-234759-m03             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m20s
	  kube-system                 kube-controller-manager-ha-234759-m03    200m (10%)    0 (0%)      0 (0%)           0 (0%)         2m20s
	  kube-system                 kube-proxy-qrdxc                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m23s
	  kube-system                 kube-scheduler-ha-234759-m03             100m (5%)     0 (0%)      0 (0%)           0 (0%)         2m20s
	  kube-system                 kube-vip-ha-234759-m03                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m16s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (1%)  50Mi (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type    Reason                   Age                    From             Message
	  ----    ------                   ----                   ----             -------
	  Normal  Starting                 2m15s                  kube-proxy       
	  Normal  NodeHasSufficientMemory  2m23s (x8 over 2m23s)  kubelet          Node ha-234759-m03 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    2m23s (x7 over 2m23s)  kubelet          Node ha-234759-m03 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     2m23s (x7 over 2m23s)  kubelet          Node ha-234759-m03 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  2m23s                  kubelet          Updated Node Allocatable limit across pods
	  Normal  RegisteredNode           2m22s                  node-controller  Node ha-234759-m03 event: Registered Node ha-234759-m03 in Controller
	  Normal  RegisteredNode           2m21s                  node-controller  Node ha-234759-m03 event: Registered Node ha-234759-m03 in Controller
	  Normal  RegisteredNode           2m12s                  node-controller  Node ha-234759-m03 event: Registered Node ha-234759-m03 in Controller
	  Normal  RegisteredNode           6s                     node-controller  Node ha-234759-m03 event: Registered Node ha-234759-m03 in Controller
	
	
	Name:               ha-234759-m04
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759-m04
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_54_13_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:54:12 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759-m04
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:55:13 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:54:13 +0000   Mon, 16 Sep 2024 10:54:12 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:54:13 +0000   Mon, 16 Sep 2024 10:54:12 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:54:13 +0000   Mon, 16 Sep 2024 10:54:12 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:54:13 +0000   Mon, 16 Sep 2024 10:54:13 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.5
	  Hostname:    ha-234759-m04
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 a8ed48d03d3c49bab7b6f3dbb66aab2e
	  System UUID:                3f4e61b4-061e-4448-a9f3-3c0401d9b215
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.3.0/24
	PodCIDRs:                     10.244.3.0/24
	Non-terminated Pods:          (2 in total)
	  Namespace                   Name                CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                ------------  ----------  ---------------  -------------  ---
	  kube-system                 kindnet-lwtj4       100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      65s
	  kube-system                 kube-proxy-m84xg    0 (0%)        0 (0%)      0 (0%)           0 (0%)         65s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type    Reason                   Age                From             Message
	  ----    ------                   ----               ----             -------
	  Normal  Starting                 62s                kube-proxy       
	  Normal  NodeHasSufficientMemory  65s (x2 over 65s)  kubelet          Node ha-234759-m04 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    65s (x2 over 65s)  kubelet          Node ha-234759-m04 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     65s (x2 over 65s)  kubelet          Node ha-234759-m04 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  65s                kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeReady                64s                kubelet          Node ha-234759-m04 status is now: NodeReady
	  Normal  RegisteredNode           62s                node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal  RegisteredNode           62s                node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal  RegisteredNode           61s                node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal  RegisteredNode           6s                 node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	
	
	==> dmesg <==
	
	
	==> etcd [324a5470436890084c7d201c8b4f70a15952b517d95c7e4094491c0aafb39871] <==
	{"level":"info","ts":"2024-09-16T10:52:57.064481Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"warn","ts":"2024-09-16T10:52:57.272545Z","caller":"etcdhttp/peer.go:150","msg":"failed to promote a member","member-id":"94f3900974800f10","error":"etcdserver: can only promote a learner member which is in sync with leader"}
	{"level":"info","ts":"2024-09-16T10:52:58.294139Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc switched to configuration voters=(4276690428076244665 10733080707237678864 12593026477526642892)"}
	{"level":"info","ts":"2024-09-16T10:52:58.294311Z","caller":"membership/cluster.go:535","msg":"promote member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc"}
	{"level":"info","ts":"2024-09-16T10:52:58.294386Z","caller":"etcdserver/server.go:1996","msg":"applied a configuration change through raft","local-member-id":"aec36adc501070cc","raft-conf-change":"ConfChangeAddNode","raft-conf-change-node-id":"94f3900974800f10"}
	{"level":"warn","ts":"2024-09-16T10:54:44.699715Z","caller":"rafthttp/stream.go:421","msg":"lost TCP streaming connection with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"3b59db4913cc3eb9","error":"unexpected EOF"}
	{"level":"warn","ts":"2024-09-16T10:54:44.700189Z","caller":"rafthttp/peer_status.go:66","msg":"peer became inactive (message send to peer failed)","peer-id":"3b59db4913cc3eb9","error":"failed to read 3b59db4913cc3eb9 on stream Message (unexpected EOF)"}
	{"level":"warn","ts":"2024-09-16T10:54:44.701148Z","caller":"rafthttp/stream.go:421","msg":"lost TCP streaming connection with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"3b59db4913cc3eb9","error":"unexpected EOF"}
	{"level":"warn","ts":"2024-09-16T10:54:44.814493Z","caller":"rafthttp/stream.go:223","msg":"lost TCP streaming connection with remote peer","stream-writer-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"3b59db4913cc3eb9"}
	{"level":"warn","ts":"2024-09-16T10:54:45.610145Z","caller":"rafthttp/stream.go:194","msg":"lost TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"3b59db4913cc3eb9"}
	{"level":"warn","ts":"2024-09-16T10:54:45.632871Z","caller":"etcdserver/cluster_util.go:294","msg":"failed to reach the peer URL","address":"https://192.168.49.3:2380/version","remote-member-id":"3b59db4913cc3eb9","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:54:45.632943Z","caller":"etcdserver/cluster_util.go:158","msg":"failed to get version","remote-member-id":"3b59db4913cc3eb9","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:54:49.634946Z","caller":"etcdserver/cluster_util.go:294","msg":"failed to reach the peer URL","address":"https://192.168.49.3:2380/version","remote-member-id":"3b59db4913cc3eb9","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:54:49.635001Z","caller":"etcdserver/cluster_util.go:158","msg":"failed to get version","remote-member-id":"3b59db4913cc3eb9","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:54:53.635976Z","caller":"etcdserver/cluster_util.go:294","msg":"failed to reach the peer URL","address":"https://192.168.49.3:2380/version","remote-member-id":"3b59db4913cc3eb9","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:54:53.636030Z","caller":"etcdserver/cluster_util.go:158","msg":"failed to get version","remote-member-id":"3b59db4913cc3eb9","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:54:58.663178Z","caller":"etcdserver/cluster_util.go:294","msg":"failed to reach the peer URL","address":"https://192.168.49.3:2380/version","remote-member-id":"3b59db4913cc3eb9","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:54:58.663287Z","caller":"etcdserver/cluster_util.go:158","msg":"failed to get version","remote-member-id":"3b59db4913cc3eb9","error":"Get \"https://192.168.49.3:2380/version\": dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"info","ts":"2024-09-16T10:55:01.440330Z","caller":"rafthttp/peer_status.go:53","msg":"peer became active","peer-id":"3b59db4913cc3eb9"}
	{"level":"info","ts":"2024-09-16T10:55:01.442972Z","caller":"rafthttp/stream.go:412","msg":"established TCP streaming connection with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"3b59db4913cc3eb9"}
	{"level":"info","ts":"2024-09-16T10:55:01.452373Z","caller":"rafthttp/stream.go:412","msg":"established TCP streaming connection with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"3b59db4913cc3eb9"}
	{"level":"info","ts":"2024-09-16T10:55:01.541825Z","caller":"rafthttp/stream.go:249","msg":"set message encoder","from":"aec36adc501070cc","to":"3b59db4913cc3eb9","stream-type":"stream Message"}
	{"level":"info","ts":"2024-09-16T10:55:01.541868Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"3b59db4913cc3eb9"}
	{"level":"info","ts":"2024-09-16T10:55:01.683439Z","caller":"rafthttp/stream.go:249","msg":"set message encoder","from":"aec36adc501070cc","to":"3b59db4913cc3eb9","stream-type":"stream MsgApp v2"}
	{"level":"info","ts":"2024-09-16T10:55:01.683490Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"3b59db4913cc3eb9"}
	
	
	==> kernel <==
	 10:55:17 up 1 day, 14:37,  0 users,  load average: 2.52, 2.07, 1.66
	Linux ha-234759 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [7d51a8f7f42ff3becfe558f4f4801bec107af9426327a36d60c9cf3b27276148] <==
	I0916 10:54:42.833216       1 main.go:299] handling current node
	I0916 10:54:52.822973       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:54:52.823007       1 main.go:299] handling current node
	I0916 10:54:52.823022       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:54:52.823028       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:54:52.823249       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:54:52.823268       1 main.go:322] Node ha-234759-m03 has CIDR [10.244.2.0/24] 
	I0916 10:54:52.823331       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:54:52.823367       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:55:02.831935       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:55:02.832031       1 main.go:322] Node ha-234759-m03 has CIDR [10.244.2.0/24] 
	I0916 10:55:02.832210       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:55:02.832267       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:55:02.832374       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:55:02.832447       1 main.go:299] handling current node
	I0916 10:55:02.832486       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:55:02.832524       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:55:12.823725       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:55:12.823776       1 main.go:299] handling current node
	I0916 10:55:12.823795       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:55:12.823803       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:55:12.824105       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:55:12.824129       1 main.go:322] Node ha-234759-m03 has CIDR [10.244.2.0/24] 
	I0916 10:55:12.824456       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:55:12.824480       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	
	
	==> kube-apiserver [fd48034050bae874c9e190debc0b1bfa138cdf53fc5887bd15f027e7346ca82d] <==
	I0916 10:51:45.367599       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 10:51:45.427785       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 10:51:45.557714       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 10:51:45.564282       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2]
	I0916 10:51:45.565434       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:51:45.571193       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 10:51:45.591261       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 10:51:46.453614       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 10:51:46.467333       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 10:51:46.479997       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 10:51:51.170135       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0916 10:51:51.345002       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	E0916 10:53:51.206555       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34472: use of closed network connection
	E0916 10:53:51.712393       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34496: use of closed network connection
	E0916 10:53:51.999923       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34514: use of closed network connection
	E0916 10:53:52.251265       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34536: use of closed network connection
	E0916 10:53:52.485147       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34550: use of closed network connection
	E0916 10:53:52.972588       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34588: use of closed network connection
	E0916 10:53:53.602153       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34646: use of closed network connection
	E0916 10:53:53.850431       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34666: use of closed network connection
	E0916 10:53:54.095517       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34690: use of closed network connection
	E0916 10:53:54.340408       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34702: use of closed network connection
	E0916 10:53:54.582226       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34734: use of closed network connection
	E0916 10:53:54.851029       1 conn.go:339] Error on socket receive: read tcp 192.168.49.254:8443->192.168.49.1:34748: use of closed network connection
	W0916 10:54:55.575057       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2 192.168.49.4]
	
	
	==> kube-controller-manager [a7002833ce71be5c884b6b01ea4b5b23ca5c4dbd6a84cfaeed6b4d3e9829e35b] <==
	I0916 10:53:23.592099       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="56.373498ms"
	I0916 10:53:23.592345       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="205.816µs"
	I0916 10:53:23.758331       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="22.334736ms"
	I0916 10:53:23.792433       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="33.570342ms"
	I0916 10:53:23.794530       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="1.638185ms"
	I0916 10:53:24.744495       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="76.439µs"
	I0916 10:53:25.590578       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m03"
	I0916 10:53:41.904648       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m02"
	I0916 10:53:49.302045       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759"
	I0916 10:53:50.414741       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="58.307681ms"
	I0916 10:53:50.499441       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="84.648734ms"
	I0916 10:53:50.499782       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="65.657µs"
	I0916 10:53:56.200551       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m03"
	E0916 10:54:12.039568       1 certificate_controller.go:151] "Unhandled Error" err="Sync csr-q5rd8 failed with : error updating signature for csr: Operation cannot be fulfilled on certificatesigningrequests.certificates.k8s.io \"csr-q5rd8\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	I0916 10:54:12.223078       1 actual_state_of_world.go:540] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"ha-234759-m04\" does not exist"
	I0916 10:54:12.277872       1 range_allocator.go:422] "Set node PodCIDR" logger="node-ipam-controller" node="ha-234759-m04" podCIDRs=["10.244.3.0/24"]
	I0916 10:54:12.278590       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:12.278799       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:12.685159       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:13.216047       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:13.275364       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:13.275441       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="ha-234759-m04"
	I0916 10:54:13.299972       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:15.629918       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="ha-234759-m04"
	I0916 10:55:12.701175       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m02"
	
	
	==> kube-proxy [900d2ad5148fe65aefe93ce1d29763ab71494f94b5511bca347f273235ccc038] <==
	I0916 10:51:52.177892       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:51:52.290574       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:51:52.290638       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:51:52.352222       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:51:52.352350       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:51:52.354429       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:51:52.355024       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:51:52.355194       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:51:52.362196       1 config.go:199] "Starting service config controller"
	I0916 10:51:52.362316       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:51:52.362400       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:51:52.362458       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:51:52.363411       1 config.go:328] "Starting node config controller"
	I0916 10:51:52.365650       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:51:52.462531       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:51:52.462632       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:51:52.471949       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [5a7d53b11a05f48872cdd02a26e2074bdb1c6ee6e353ceb2ff9519faca117d67] <==
	E0916 10:53:20.488702       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-kjr9x\": pod busybox-7dff88458-kjr9x is already assigned to node \"ha-234759\"" pod="default/busybox-7dff88458-kjr9x"
	I0916 10:53:20.488719       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-7dff88458-kjr9x" node="ha-234759"
	E0916 10:53:20.489597       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-m9lsb\": pod busybox-7dff88458-m9lsb is already assigned to node \"ha-234759-m03\"" plugin="DefaultBinder" pod="default/busybox-7dff88458-m9lsb" node="ha-234759-m03"
	E0916 10:53:20.489638       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod af6e1f4d-184c-4d9d-bed7-b49448f6daa9(default/busybox-7dff88458-m9lsb) wasn't assumed so cannot be forgotten" pod="default/busybox-7dff88458-m9lsb"
	E0916 10:53:20.489651       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-m9lsb\": pod busybox-7dff88458-m9lsb is already assigned to node \"ha-234759-m03\"" pod="default/busybox-7dff88458-m9lsb"
	I0916 10:53:20.489677       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-7dff88458-m9lsb" node="ha-234759-m03"
	E0916 10:54:12.393673       1 schedule_one.go:953] "Scheduler cache AssumePod failed" err="pod 10919f4b-06e2-4ba9-8ed7-6a6493352be5(kube-system/kube-proxy-xscmm) is in the cache, so can't be assumed" pod="kube-system/kube-proxy-xscmm"
	E0916 10:54:12.393715       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="pod 10919f4b-06e2-4ba9-8ed7-6a6493352be5(kube-system/kube-proxy-xscmm) is in the cache, so can't be assumed" pod="kube-system/kube-proxy-xscmm"
	I0916 10:54:12.393736       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kube-proxy-xscmm" node="ha-234759-m04"
	E0916 10:54:12.419351       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kindnet-sk6c5\": pod kindnet-sk6c5 is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="kube-system/kindnet-sk6c5" node="ha-234759-m04"
	E0916 10:54:12.419403       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 021eac3e-1cf0-40c0-a4e8-6bfe73a62a75(kube-system/kindnet-sk6c5) wasn't assumed so cannot be forgotten" pod="kube-system/kindnet-sk6c5"
	E0916 10:54:12.419578       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kindnet-sk6c5\": pod kindnet-sk6c5 is already assigned to node \"ha-234759-m04\"" pod="kube-system/kindnet-sk6c5"
	I0916 10:54:12.419725       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kindnet-sk6c5" node="ha-234759-m04"
	E0916 10:54:12.434533       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kube-proxy-m84xg\": pod kube-proxy-m84xg is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="kube-system/kube-proxy-m84xg" node="ha-234759-m04"
	E0916 10:54:12.434587       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 6b84ade0-ae34-4c7e-b1ed-e9c83f4bf130(kube-system/kube-proxy-m84xg) wasn't assumed so cannot be forgotten" pod="kube-system/kube-proxy-m84xg"
	E0916 10:54:12.434606       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kube-proxy-m84xg\": pod kube-proxy-m84xg is already assigned to node \"ha-234759-m04\"" pod="kube-system/kube-proxy-m84xg"
	I0916 10:54:12.434653       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kube-proxy-m84xg" node="ha-234759-m04"
	E0916 10:54:12.656470       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kindnet-c59dr\": pod kindnet-c59dr is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="kube-system/kindnet-c59dr" node="ha-234759-m04"
	E0916 10:54:12.656573       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 929f6efd-2b9a-4b18-919d-36fc692d45c4(kube-system/kindnet-c59dr) wasn't assumed so cannot be forgotten" pod="kube-system/kindnet-c59dr"
	E0916 10:54:12.656663       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kindnet-c59dr\": pod kindnet-c59dr is already assigned to node \"ha-234759-m04\"" pod="kube-system/kindnet-c59dr"
	I0916 10:54:12.656746       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kindnet-c59dr" node="ha-234759-m04"
	E0916 10:54:12.678394       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kube-proxy-zcn6b\": pod kube-proxy-zcn6b is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="kube-system/kube-proxy-zcn6b" node="ha-234759-m04"
	E0916 10:54:12.678469       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 66cac608-b205-4573-afb9-4e337fdadf3c(kube-system/kube-proxy-zcn6b) wasn't assumed so cannot be forgotten" pod="kube-system/kube-proxy-zcn6b"
	E0916 10:54:12.678502       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kube-proxy-zcn6b\": pod kube-proxy-zcn6b is already assigned to node \"ha-234759-m04\"" pod="kube-system/kube-proxy-zcn6b"
	I0916 10:54:12.678547       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kube-proxy-zcn6b" node="ha-234759-m04"
	
	
	==> kubelet <==
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.419239    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ee79653f-8a9e-41e2-82bb-7b08fc87e265-lib-modules\") pod \"kindnet-q8nl6\" (UID: \"ee79653f-8a9e-41e2-82bb-7b08fc87e265\") " pod="kube-system/kindnet-q8nl6"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.419287    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw9tx\" (UniqueName: \"kubernetes.io/projected/8ea118a7-cc54-4dd9-8bb2-cfc133a376fc-kube-api-access-dw9tx\") pod \"kube-proxy-gwdl4\" (UID: \"8ea118a7-cc54-4dd9-8bb2-cfc133a376fc\") " pod="kube-system/kube-proxy-gwdl4"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.520298    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/18d893a2-274a-413e-bf3d-0dd1e88a9984-config-volume\") pod \"coredns-7c65d6cfc9-2l4br\" (UID: \"18d893a2-274a-413e-bf3d-0dd1e88a9984\") " pod="kube-system/coredns-7c65d6cfc9-2l4br"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.520433    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c14618b-0831-4e8b-be9e-ba0049031bdb-config-volume\") pod \"coredns-7c65d6cfc9-vqj8q\" (UID: \"2c14618b-0831-4e8b-be9e-ba0049031bdb\") " pod="kube-system/coredns-7c65d6cfc9-vqj8q"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.520456    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gfn5\" (UniqueName: \"kubernetes.io/projected/18d893a2-274a-413e-bf3d-0dd1e88a9984-kube-api-access-2gfn5\") pod \"coredns-7c65d6cfc9-2l4br\" (UID: \"18d893a2-274a-413e-bf3d-0dd1e88a9984\") " pod="kube-system/coredns-7c65d6cfc9-2l4br"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.520480    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pkdh\" (UniqueName: \"kubernetes.io/projected/2c14618b-0831-4e8b-be9e-ba0049031bdb-kube-api-access-4pkdh\") pod \"coredns-7c65d6cfc9-vqj8q\" (UID: \"2c14618b-0831-4e8b-be9e-ba0049031bdb\") " pod="kube-system/coredns-7c65d6cfc9-vqj8q"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: I0916 10:51:51.555785    1577 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.870339    1577 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\": failed to find network info for sandbox \"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\""
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.870422    1577 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\": failed to find network info for sandbox \"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\"" pod="kube-system/coredns-7c65d6cfc9-vqj8q"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.870444    1577 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\": failed to find network info for sandbox \"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\"" pod="kube-system/coredns-7c65d6cfc9-vqj8q"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.870487    1577 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-7c65d6cfc9-vqj8q_kube-system(2c14618b-0831-4e8b-be9e-ba0049031bdb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-7c65d6cfc9-vqj8q_kube-system(2c14618b-0831-4e8b-be9e-ba0049031bdb)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\\\": failed to find network info for sandbox \\\"450b5a6904e234b0951e85614b5c312b3a570d13ed6385cf3d785a2bb816c6ba\\\"\"" pod="kube-system/coredns-7c65d6cfc9-vqj8q" podUID="2c14618b-0831-4e8b-be9e-ba0049031bdb"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.903598    1577 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\": failed to find network info for sandbox \"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\""
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.903661    1577 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\": failed to find network info for sandbox \"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\"" pod="kube-system/coredns-7c65d6cfc9-2l4br"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.903683    1577 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\": failed to find network info for sandbox \"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\"" pod="kube-system/coredns-7c65d6cfc9-2l4br"
	Sep 16 10:51:51 ha-234759 kubelet[1577]: E0916 10:51:51.903739    1577 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-7c65d6cfc9-2l4br_kube-system(18d893a2-274a-413e-bf3d-0dd1e88a9984)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-7c65d6cfc9-2l4br_kube-system(18d893a2-274a-413e-bf3d-0dd1e88a9984)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\\\": failed to find network info for sandbox \\\"4e9f4ae1d9a7883382ca1b9d68b39ac256299dfecfac589accf620634710c91f\\\"\"" pod="kube-system/coredns-7c65d6cfc9-2l4br" podUID="18d893a2-274a-413e-bf3d-0dd1e88a9984"
	Sep 16 10:51:52 ha-234759 kubelet[1577]: I0916 10:51:52.427403    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/e8924914-9ba5-4adc-ac46-9d3d97b0bc08-tmp\") pod \"storage-provisioner\" (UID: \"e8924914-9ba5-4adc-ac46-9d3d97b0bc08\") " pod="kube-system/storage-provisioner"
	Sep 16 10:51:52 ha-234759 kubelet[1577]: I0916 10:51:52.427993    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8mz2\" (UniqueName: \"kubernetes.io/projected/e8924914-9ba5-4adc-ac46-9d3d97b0bc08-kube-api-access-s8mz2\") pod \"storage-provisioner\" (UID: \"e8924914-9ba5-4adc-ac46-9d3d97b0bc08\") " pod="kube-system/storage-provisioner"
	Sep 16 10:51:52 ha-234759 kubelet[1577]: I0916 10:51:52.466980    1577 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-proxy-gwdl4" podStartSLOduration=1.466960435 podStartE2EDuration="1.466960435s" podCreationTimestamp="2024-09-16 10:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:51:52.466745839 +0000 UTC m=+6.217173255" watchObservedRunningTime="2024-09-16 10:51:52.466960435 +0000 UTC m=+6.217387900"
	Sep 16 10:51:53 ha-234759 kubelet[1577]: I0916 10:51:53.482284    1577 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kindnet-q8nl6" podStartSLOduration=2.482261961 podStartE2EDuration="2.482261961s" podCreationTimestamp="2024-09-16 10:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:51:52.512634019 +0000 UTC m=+6.263061435" watchObservedRunningTime="2024-09-16 10:51:53.482261961 +0000 UTC m=+7.232689377"
	Sep 16 10:51:53 ha-234759 kubelet[1577]: I0916 10:51:53.502715    1577 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/storage-provisioner" podStartSLOduration=1.502654063 podStartE2EDuration="1.502654063s" podCreationTimestamp="2024-09-16 10:51:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:51:53.48355399 +0000 UTC m=+7.233981414" watchObservedRunningTime="2024-09-16 10:51:53.502654063 +0000 UTC m=+7.253081478"
	Sep 16 10:51:57 ha-234759 kubelet[1577]: I0916 10:51:57.010572    1577 kuberuntime_manager.go:1635] "Updating runtime config through cri with podcidr" CIDR="10.244.0.0/24"
	Sep 16 10:51:57 ha-234759 kubelet[1577]: I0916 10:51:57.011701    1577 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Sep 16 10:52:04 ha-234759 kubelet[1577]: I0916 10:52:04.547013    1577 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/coredns-7c65d6cfc9-vqj8q" podStartSLOduration=13.546981888 podStartE2EDuration="13.546981888s" podCreationTimestamp="2024-09-16 10:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:52:04.522354317 +0000 UTC m=+18.272781733" watchObservedRunningTime="2024-09-16 10:52:04.546981888 +0000 UTC m=+18.297409304"
	Sep 16 10:52:05 ha-234759 kubelet[1577]: I0916 10:52:05.564407    1577 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/coredns-7c65d6cfc9-2l4br" podStartSLOduration=14.564378665 podStartE2EDuration="14.564378665s" podCreationTimestamp="2024-09-16 10:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:52:05.5311841 +0000 UTC m=+19.281611524" watchObservedRunningTime="2024-09-16 10:52:05.564378665 +0000 UTC m=+19.314806081"
	Sep 16 10:53:20 ha-234759 kubelet[1577]: I0916 10:53:20.569129    1577 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdx8v\" (UniqueName: \"kubernetes.io/projected/18a7f530-b34d-413a-9028-13511f5b9be6-kube-api-access-wdx8v\") pod \"busybox-7dff88458-kjr9x\" (UID: \"18a7f530-b34d-413a-9028-13511f5b9be6\") " pod="default/busybox-7dff88458-kjr9x"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p ha-234759 -n ha-234759
helpers_test.go:261: (dbg) Run:  kubectl --context ha-234759 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context ha-234759 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (861.296µs)
helpers_test.go:263: kubectl --context ha-234759 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiControlPlane/serial/RestartSecondaryNode (21.47s)

                                                
                                    
x
+
TestMultiControlPlane/serial/DeleteSecondaryNode (13.23s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DeleteSecondaryNode
ha_test.go:487: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 node delete m03 -v=7 --alsologtostderr
ha_test.go:487: (dbg) Done: out/minikube-linux-arm64 -p ha-234759 node delete m03 -v=7 --alsologtostderr: (8.971491377s)
ha_test.go:493: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 status -v=7 --alsologtostderr
ha_test.go:511: (dbg) Run:  kubectl get nodes
ha_test.go:511: (dbg) Non-zero exit: kubectl get nodes: fork/exec /usr/local/bin/kubectl: exec format error (759.38µs)
ha_test.go:513: failed to run kubectl get nodes. args "kubectl get nodes" : fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiControlPlane/serial/DeleteSecondaryNode]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect ha-234759
helpers_test.go:235: (dbg) docker inspect ha-234759:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59",
	        "Created": "2024-09-16T10:51:26.447161448Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2121111,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:55:58.019108361Z",
	            "FinishedAt": "2024-09-16T10:55:57.183322081Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/hostname",
	        "HostsPath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/hosts",
	        "LogPath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59-json.log",
	        "Name": "/ha-234759",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "ha-234759:/var",
	                "/lib/modules:/lib/modules:ro"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "ha-234759",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613/merged",
	                "UpperDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613/diff",
	                "WorkDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "volume",
	                "Name": "ha-234759",
	                "Source": "/var/lib/docker/volumes/ha-234759/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            },
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            }
	        ],
	        "Config": {
	            "Hostname": "ha-234759",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "ha-234759",
	                "name.minikube.sigs.k8s.io": "ha-234759",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "3ce9bbc7d0321d75efecc7c2cca2c26d3a13bdca0e7e1a623efb1c022d893651",
	            "SandboxKey": "/var/run/docker/netns/3ce9bbc7d032",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40622"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40623"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40626"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40624"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40625"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "ha-234759": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "941929ec13d1e6034904933d29100a93cf04d9e6a30844d8d0c54e3a464c32cd",
	                    "EndpointID": "bc15daaf2d08111ef75e7eeffd0281ba4e1f58863628edfb4614684b9f832282",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "ha-234759",
	                        "6306ac5a5985"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p ha-234759 -n ha-234759
helpers_test.go:244: <<< TestMultiControlPlane/serial/DeleteSecondaryNode FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiControlPlane/serial/DeleteSecondaryNode]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p ha-234759 logs -n 25: (2.260440926s)
helpers_test.go:252: TestMultiControlPlane/serial/DeleteSecondaryNode logs: 
-- stdout --
	
	==> Audit <==
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	| Command |                                       Args                                       |  Profile  |  User   | Version |     Start Time      |      End Time       |
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759-m02 sudo cat                                          | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m03_ha-234759-m02.txt                             |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m03:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04:/home/docker/cp-test_ha-234759-m03_ha-234759-m04.txt               |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759-m04 sudo cat                                          | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m03_ha-234759-m04.txt                             |           |         |         |                     |                     |
	| cp      | ha-234759 cp testdata/cp-test.txt                                                | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04:/home/docker/cp-test.txt                                           |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /tmp/TestMultiControlPlaneserialCopyFile3470256434/001/cp-test_ha-234759-m04.txt |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759:/home/docker/cp-test_ha-234759-m04_ha-234759.txt                       |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759 sudo cat                                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m04_ha-234759.txt                                 |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m02:/home/docker/cp-test_ha-234759-m04_ha-234759-m02.txt               |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759-m02 sudo cat                                          | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m04_ha-234759-m02.txt                             |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m03:/home/docker/cp-test_ha-234759-m04_ha-234759-m03.txt               |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759-m03 sudo cat                                          | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m04_ha-234759-m03.txt                             |           |         |         |                     |                     |
	| node    | ha-234759 node stop m02 -v=7                                                     | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | ha-234759 node start m02 -v=7                                                    | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:55 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | list -p ha-234759 -v=7                                                           | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC |                     |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| stop    | -p ha-234759 -v=7                                                                | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| start   | -p ha-234759 --wait=true -v=7                                                    | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:57 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | list -p ha-234759                                                                | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:57 UTC |                     |
	| node    | ha-234759 node delete m03 -v=7                                                   | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:57 UTC | 16 Sep 24 10:57 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:55:57
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:55:57.480009 2120908 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:55:57.480154 2120908 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:55:57.480166 2120908 out.go:358] Setting ErrFile to fd 2...
	I0916 10:55:57.480172 2120908 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:55:57.480413 2120908 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:55:57.480764 2120908 out.go:352] Setting JSON to false
	I0916 10:55:57.481737 2120908 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":139100,"bootTime":1726345058,"procs":179,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:55:57.481815 2120908 start.go:139] virtualization:  
	I0916 10:55:57.484672 2120908 out.go:177] * [ha-234759] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:55:57.486814 2120908 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:55:57.487009 2120908 notify.go:220] Checking for updates...
	I0916 10:55:57.490603 2120908 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:55:57.492750 2120908 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:55:57.494790 2120908 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:55:57.496834 2120908 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:55:57.498843 2120908 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:55:57.501849 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:55:57.501943 2120908 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:55:57.528269 2120908 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:55:57.528401 2120908 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:55:57.584410 2120908 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:6 ContainersRunning:2 ContainersPaused:0 ContainersStopped:4 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:52 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 10:55:57.574157428 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:55:57.584525 2120908 docker.go:318] overlay module found
	I0916 10:55:57.587413 2120908 out.go:177] * Using the docker driver based on existing profile
	I0916 10:55:57.589737 2120908 start.go:297] selected driver: docker
	I0916 10:55:57.589756 2120908 start.go:901] validating driver "docker" against &{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName
:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime: ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:f
alse inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0
MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:55:57.589907 2120908 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:55:57.590013 2120908 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:55:57.652571 2120908 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:6 ContainersRunning:2 ContainersPaused:0 ContainersStopped:4 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:52 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 10:55:57.642751405 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:55:57.653008 2120908 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:55:57.653046 2120908 cni.go:84] Creating CNI manager for ""
	I0916 10:55:57.653103 2120908 cni.go:136] multinode detected (4 nodes found), recommending kindnet
	I0916 10:55:57.653158 2120908 start.go:340] cluster config:
	{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:fa
lse istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false
DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:55:57.655737 2120908 out.go:177] * Starting "ha-234759" primary control-plane node in "ha-234759" cluster
	I0916 10:55:57.658867 2120908 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:55:57.661320 2120908 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:55:57.663600 2120908 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:55:57.663655 2120908 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:55:57.663668 2120908 cache.go:56] Caching tarball of preloaded images
	I0916 10:55:57.663666 2120908 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:55:57.663748 2120908 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:55:57.663758 2120908 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:55:57.663903 2120908 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	W0916 10:55:57.682798 2120908 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:55:57.682821 2120908 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:55:57.682898 2120908 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:55:57.682921 2120908 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:55:57.682926 2120908 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:55:57.682937 2120908 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:55:57.682944 2120908 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:55:57.684197 2120908 image.go:273] response: 
	I0916 10:55:57.856254 2120908 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:55:57.856305 2120908 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:55:57.856335 2120908 start.go:360] acquireMachinesLock for ha-234759: {Name:mk07434fa5fb218c324ac4567510c65c6e772f63 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:55:57.856402 2120908 start.go:364] duration metric: took 42.798µs to acquireMachinesLock for "ha-234759"
	I0916 10:55:57.856426 2120908 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:55:57.856435 2120908 fix.go:54] fixHost starting: 
	I0916 10:55:57.856729 2120908 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:55:57.873037 2120908 fix.go:112] recreateIfNeeded on ha-234759: state=Stopped err=<nil>
	W0916 10:55:57.873066 2120908 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:55:57.877930 2120908 out.go:177] * Restarting existing docker container for "ha-234759" ...
	I0916 10:55:57.880628 2120908 cli_runner.go:164] Run: docker start ha-234759
	I0916 10:55:58.197852 2120908 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:55:58.223512 2120908 kic.go:430] container "ha-234759" state is running.
	I0916 10:55:58.225048 2120908 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759
	I0916 10:55:58.246596 2120908 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:55:58.247102 2120908 machine.go:93] provisionDockerMachine start ...
	I0916 10:55:58.247184 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:55:58.279538 2120908 main.go:141] libmachine: Using SSH client type: native
	I0916 10:55:58.279797 2120908 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40622 <nil> <nil>}
	I0916 10:55:58.279812 2120908 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:55:58.281082 2120908 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:54004->127.0.0.1:40622: read: connection reset by peer
	I0916 10:56:01.418241 2120908 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759
	
	I0916 10:56:01.418274 2120908 ubuntu.go:169] provisioning hostname "ha-234759"
	I0916 10:56:01.418342 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:56:01.434891 2120908 main.go:141] libmachine: Using SSH client type: native
	I0916 10:56:01.435172 2120908 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40622 <nil> <nil>}
	I0916 10:56:01.435191 2120908 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-234759 && echo "ha-234759" | sudo tee /etc/hostname
	I0916 10:56:01.587029 2120908 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759
	
	I0916 10:56:01.587169 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:56:01.604059 2120908 main.go:141] libmachine: Using SSH client type: native
	I0916 10:56:01.604324 2120908 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40622 <nil> <nil>}
	I0916 10:56:01.604347 2120908 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-234759' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-234759/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-234759' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:56:01.738989 2120908 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:56:01.739021 2120908 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:56:01.739053 2120908 ubuntu.go:177] setting up certificates
	I0916 10:56:01.739064 2120908 provision.go:84] configureAuth start
	I0916 10:56:01.739136 2120908 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759
	I0916 10:56:01.755725 2120908 provision.go:143] copyHostCerts
	I0916 10:56:01.755770 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:56:01.755813 2120908 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:56:01.755819 2120908 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:56:01.755899 2120908 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:56:01.755979 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:56:01.755995 2120908 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:56:01.755999 2120908 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:56:01.756027 2120908 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:56:01.756065 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:56:01.756081 2120908 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:56:01.756085 2120908 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:56:01.756107 2120908 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:56:01.756150 2120908 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.ha-234759 san=[127.0.0.1 192.168.49.2 ha-234759 localhost minikube]
	I0916 10:56:02.106284 2120908 provision.go:177] copyRemoteCerts
	I0916 10:56:02.106356 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:56:02.106402 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:56:02.124816 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40622 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:56:02.224013 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:56:02.224072 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:56:02.253204 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:56:02.253282 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1196 bytes)
	I0916 10:56:02.278448 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:56:02.278513 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:56:02.303118 2120908 provision.go:87] duration metric: took 564.038715ms to configureAuth
	I0916 10:56:02.303144 2120908 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:56:02.303382 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:56:02.303397 2120908 machine.go:96] duration metric: took 4.05627714s to provisionDockerMachine
	I0916 10:56:02.303406 2120908 start.go:293] postStartSetup for "ha-234759" (driver="docker")
	I0916 10:56:02.303418 2120908 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:56:02.303474 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:56:02.303518 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:56:02.320346 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40622 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:56:02.415966 2120908 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:56:02.419017 2120908 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:56:02.419053 2120908 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:56:02.419064 2120908 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:56:02.419071 2120908 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:56:02.419082 2120908 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:56:02.419139 2120908 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:56:02.419221 2120908 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:56:02.419232 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:56:02.419331 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:56:02.427603 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:56:02.451101 2120908 start.go:296] duration metric: took 147.679579ms for postStartSetup
	I0916 10:56:02.451221 2120908 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:56:02.451286 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:56:02.467364 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40622 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:56:02.559530 2120908 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:56:02.564156 2120908 fix.go:56] duration metric: took 4.707711667s for fixHost
	I0916 10:56:02.564180 2120908 start.go:83] releasing machines lock for "ha-234759", held for 4.707766223s
	I0916 10:56:02.564262 2120908 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759
	I0916 10:56:02.580679 2120908 ssh_runner.go:195] Run: cat /version.json
	I0916 10:56:02.580734 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:56:02.580738 2120908 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:56:02.580823 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:56:02.600232 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40622 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:56:02.604194 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40622 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:56:02.694093 2120908 ssh_runner.go:195] Run: systemctl --version
	I0916 10:56:02.817302 2120908 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:56:02.821815 2120908 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:56:02.842186 2120908 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:56:02.842310 2120908 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:56:02.851834 2120908 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:56:02.851858 2120908 start.go:495] detecting cgroup driver to use...
	I0916 10:56:02.851893 2120908 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:56:02.851965 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:56:02.866221 2120908 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:56:02.877863 2120908 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:56:02.877934 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:56:02.891011 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:56:02.902785 2120908 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:56:02.992999 2120908 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:56:03.098961 2120908 docker.go:233] disabling docker service ...
	I0916 10:56:03.099103 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:56:03.112655 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:56:03.125591 2120908 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:56:03.220377 2120908 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:56:03.307626 2120908 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:56:03.319639 2120908 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:56:03.337264 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:56:03.347898 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:56:03.357989 2120908 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:56:03.358124 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:56:03.368198 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:56:03.378623 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:56:03.388722 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:56:03.398596 2120908 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:56:03.408128 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:56:03.418529 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:56:03.428819 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:56:03.439198 2120908 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:56:03.448073 2120908 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:56:03.457035 2120908 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:56:03.537187 2120908 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:56:03.705619 2120908 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:56:03.705760 2120908 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:56:03.709719 2120908 start.go:563] Will wait 60s for crictl version
	I0916 10:56:03.709826 2120908 ssh_runner.go:195] Run: which crictl
	I0916 10:56:03.713625 2120908 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:56:03.753887 2120908 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:56:03.753969 2120908 ssh_runner.go:195] Run: containerd --version
	I0916 10:56:03.781209 2120908 ssh_runner.go:195] Run: containerd --version
	I0916 10:56:03.809080 2120908 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:56:03.811654 2120908 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:56:03.833037 2120908 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:56:03.836882 2120908 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:56:03.849026 2120908 kubeadm.go:883] updating cluster {Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APISe
rverNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false in
accel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountTy
pe:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:56:03.849192 2120908 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:56:03.849259 2120908 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:56:03.886704 2120908 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:56:03.886729 2120908 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:56:03.886795 2120908 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:56:03.926201 2120908 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:56:03.926221 2120908 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:56:03.926230 2120908 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 containerd true true} ...
	I0916 10:56:03.926340 2120908 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-234759 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:56:03.926407 2120908 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:56:03.967217 2120908 cni.go:84] Creating CNI manager for ""
	I0916 10:56:03.967244 2120908 cni.go:136] multinode detected (4 nodes found), recommending kindnet
	I0916 10:56:03.967253 2120908 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:56:03.967275 2120908 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:ha-234759 NodeName:ha-234759 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kuberne
tes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:56:03.967405 2120908 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "ha-234759"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:56:03.967427 2120908 kube-vip.go:115] generating kube-vip config ...
	I0916 10:56:03.967483 2120908 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:56:03.980684 2120908 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:56:03.980801 2120908 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
	I0916 10:56:03.980868 2120908 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:56:03.990248 2120908 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:56:03.990322 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube /etc/kubernetes/manifests
	I0916 10:56:03.999569 2120908 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (313 bytes)
	I0916 10:56:04.021934 2120908 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:56:04.042564 2120908 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2163 bytes)
	I0916 10:56:04.062148 2120908 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:56:04.082322 2120908 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:56:04.085896 2120908 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:56:04.097613 2120908 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:56:04.192358 2120908 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:56:04.206373 2120908 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759 for IP: 192.168.49.2
	I0916 10:56:04.206395 2120908 certs.go:194] generating shared ca certs ...
	I0916 10:56:04.206412 2120908 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:56:04.206609 2120908 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:56:04.206719 2120908 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:56:04.206732 2120908 certs.go:256] generating profile certs ...
	I0916 10:56:04.206815 2120908 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key
	I0916 10:56:04.206855 2120908 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.4003aa2c
	I0916 10:56:04.206879 2120908 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.4003aa2c with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.3 192.168.49.4 192.168.49.254]
	I0916 10:56:04.612344 2120908 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.4003aa2c ...
	I0916 10:56:04.612446 2120908 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.4003aa2c: {Name:mkb92a6345747170d35621c678771a849d4f5dcc Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:56:04.612773 2120908 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.4003aa2c ...
	I0916 10:56:04.612824 2120908 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.4003aa2c: {Name:mk6b053a03fa931611de973f9c169c7c1fe3d394 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:56:04.613009 2120908 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.4003aa2c -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt
	I0916 10:56:04.613251 2120908 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.4003aa2c -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key
	I0916 10:56:04.613499 2120908 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key
	I0916 10:56:04.613551 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:56:04.613612 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:56:04.613655 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:56:04.613702 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:56:04.613747 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:56:04.613811 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:56:04.613871 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:56:04.613908 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:56:04.614038 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:56:04.614132 2120908 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:56:04.614175 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:56:04.614220 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:56:04.614298 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:56:04.614351 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:56:04.614454 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:56:04.614542 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:56:04.614626 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:56:04.614664 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:56:04.615493 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:56:04.644665 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:56:04.671224 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:56:04.697493 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:56:04.723062 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1440 bytes)
	I0916 10:56:04.748214 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:56:04.772839 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:56:04.799224 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 10:56:04.825767 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:56:04.850459 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:56:04.875561 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:56:04.900800 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:56:04.918980 2120908 ssh_runner.go:195] Run: openssl version
	I0916 10:56:04.924509 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:56:04.934215 2120908 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:56:04.937743 2120908 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:56:04.937812 2120908 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:56:04.944798 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:56:04.953975 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:56:04.963221 2120908 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:56:04.966574 2120908 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:56:04.966636 2120908 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:56:04.973524 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:56:04.982373 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:56:04.991875 2120908 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:56:04.995371 2120908 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:56:04.995451 2120908 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:56:05.002852 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:56:05.016182 2120908 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:56:05.020820 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:56:05.028722 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:56:05.036274 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:56:05.043622 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:56:05.051721 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:56:05.059023 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:56:05.066091 2120908 kubeadm.go:392] StartCluster: {Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServe
rNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inacc
el:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:
9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:56:05.066227 2120908 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:56:05.066291 2120908 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:56:05.108562 2120908 cri.go:89] found id: "36da5a0fb370e723326a9743b110be293f982a4028eb1a1d81cc396f52ee1ec8"
	I0916 10:56:05.108586 2120908 cri.go:89] found id: "0e47bf675d7dfaa397cb12c3a975c0fdae535cdf7989381139d757ac5f8a5eaf"
	I0916 10:56:05.108591 2120908 cri.go:89] found id: "e629c24c41e32603dc9a53125aa7122a5a0c58d985e95165cffe89d5670988c4"
	I0916 10:56:05.108595 2120908 cri.go:89] found id: "2586d6167e7558670ba282e8e630c6301933dfdf0302bef7718de8fada959378"
	I0916 10:56:05.108599 2120908 cri.go:89] found id: "7d51a8f7f42ff3becfe558f4f4801bec107af9426327a36d60c9cf3b27276148"
	I0916 10:56:05.108603 2120908 cri.go:89] found id: "900d2ad5148fe65aefe93ce1d29763ab71494f94b5511bca347f273235ccc038"
	I0916 10:56:05.108613 2120908 cri.go:89] found id: "324a5470436890084c7d201c8b4f70a15952b517d95c7e4094491c0aafb39871"
	I0916 10:56:05.108617 2120908 cri.go:89] found id: "5a7d53b11a05f48872cdd02a26e2074bdb1c6ee6e353ceb2ff9519faca117d67"
	I0916 10:56:05.108622 2120908 cri.go:89] found id: "a7002833ce71be5c884b6b01ea4b5b23ca5c4dbd6a84cfaeed6b4d3e9829e35b"
	I0916 10:56:05.108637 2120908 cri.go:89] found id: "fd48034050bae874c9e190debc0b1bfa138cdf53fc5887bd15f027e7346ca82d"
	I0916 10:56:05.108641 2120908 cri.go:89] found id: ""
	I0916 10:56:05.108699 2120908 ssh_runner.go:195] Run: sudo runc --root /run/containerd/runc/k8s.io list -f json
	I0916 10:56:05.121869 2120908 cri.go:116] JSON = null
	W0916 10:56:05.121921 2120908 kubeadm.go:399] unpause failed: list paused: list returned 0 containers, but ps returned 10
	I0916 10:56:05.121983 2120908 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:56:05.131810 2120908 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 10:56:05.131830 2120908 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 10:56:05.131888 2120908 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 10:56:05.141239 2120908 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:56:05.141679 2120908 kubeconfig.go:47] verify endpoint returned: get endpoint: "ha-234759" does not appear in /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:56:05.141789 2120908 kubeconfig.go:62] /home/jenkins/minikube-integration/19651-2057935/kubeconfig needs updating (will repair): [kubeconfig missing "ha-234759" cluster setting kubeconfig missing "ha-234759" context setting]
	I0916 10:56:05.142087 2120908 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:56:05.142519 2120908 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:56:05.142817 2120908 kapi.go:59] client config for ha-234759: &rest.Config{Host:"https://192.168.49.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(
nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:56:05.143292 2120908 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 10:56:05.143517 2120908 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 10:56:05.153710 2120908 kubeadm.go:630] The running cluster does not require reconfiguration: 192.168.49.2
	I0916 10:56:05.153734 2120908 kubeadm.go:597] duration metric: took 21.896971ms to restartPrimaryControlPlane
	I0916 10:56:05.153743 2120908 kubeadm.go:394] duration metric: took 87.661992ms to StartCluster
	I0916 10:56:05.153792 2120908 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:56:05.153861 2120908 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:56:05.154473 2120908 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:56:05.154797 2120908 start.go:233] HA (multi-control plane) cluster: will skip waiting for primary control-plane node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:56:05.154822 2120908 start.go:241] waiting for startup goroutines ...
	I0916 10:56:05.154830 2120908 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:56:05.155299 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:56:05.159302 2120908 out.go:177] * Enabled addons: 
	I0916 10:56:05.161241 2120908 addons.go:510] duration metric: took 6.404489ms for enable addons: enabled=[]
	I0916 10:56:05.161280 2120908 start.go:246] waiting for cluster config update ...
	I0916 10:56:05.161292 2120908 start.go:255] writing updated cluster config ...
	I0916 10:56:05.163752 2120908 out.go:201] 
	I0916 10:56:05.166164 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:56:05.166284 2120908 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:56:05.169054 2120908 out.go:177] * Starting "ha-234759-m02" control-plane node in "ha-234759" cluster
	I0916 10:56:05.171408 2120908 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:56:05.173903 2120908 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:56:05.176283 2120908 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:56:05.176318 2120908 cache.go:56] Caching tarball of preloaded images
	I0916 10:56:05.176375 2120908 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:56:05.176417 2120908 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:56:05.176434 2120908 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:56:05.176554 2120908 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	W0916 10:56:05.199768 2120908 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:56:05.199790 2120908 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:56:05.199859 2120908 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:56:05.199880 2120908 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:56:05.199889 2120908 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:56:05.199896 2120908 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:56:05.199902 2120908 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:56:05.201082 2120908 image.go:273] response: 
	I0916 10:56:05.393831 2120908 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:56:05.393874 2120908 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:56:05.393912 2120908 start.go:360] acquireMachinesLock for ha-234759-m02: {Name:mk8d038416b8f502330f7520e1c7f720d49da587 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:56:05.393996 2120908 start.go:364] duration metric: took 61.218µs to acquireMachinesLock for "ha-234759-m02"
	I0916 10:56:05.394022 2120908 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:56:05.394037 2120908 fix.go:54] fixHost starting: m02
	I0916 10:56:05.394332 2120908 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Status}}
	I0916 10:56:05.410832 2120908 fix.go:112] recreateIfNeeded on ha-234759-m02: state=Stopped err=<nil>
	W0916 10:56:05.410862 2120908 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:56:05.413947 2120908 out.go:177] * Restarting existing docker container for "ha-234759-m02" ...
	I0916 10:56:05.416591 2120908 cli_runner.go:164] Run: docker start ha-234759-m02
	I0916 10:56:05.707662 2120908 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Status}}
	I0916 10:56:05.729956 2120908 kic.go:430] container "ha-234759-m02" state is running.
	I0916 10:56:05.730364 2120908 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m02
	I0916 10:56:05.756796 2120908 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:56:05.757038 2120908 machine.go:93] provisionDockerMachine start ...
	I0916 10:56:05.757102 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:56:05.775969 2120908 main.go:141] libmachine: Using SSH client type: native
	I0916 10:56:05.776205 2120908 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40627 <nil> <nil>}
	I0916 10:56:05.776221 2120908 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:56:05.777130 2120908 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 10:56:08.977747 2120908 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m02
	
	I0916 10:56:08.977814 2120908 ubuntu.go:169] provisioning hostname "ha-234759-m02"
	I0916 10:56:08.977910 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:56:09.013541 2120908 main.go:141] libmachine: Using SSH client type: native
	I0916 10:56:09.013813 2120908 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40627 <nil> <nil>}
	I0916 10:56:09.013827 2120908 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-234759-m02 && echo "ha-234759-m02" | sudo tee /etc/hostname
	I0916 10:56:09.212820 2120908 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m02
	
	I0916 10:56:09.212943 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:56:09.243000 2120908 main.go:141] libmachine: Using SSH client type: native
	I0916 10:56:09.243241 2120908 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40627 <nil> <nil>}
	I0916 10:56:09.243261 2120908 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-234759-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-234759-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-234759-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:56:09.408230 2120908 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:56:09.408300 2120908 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:56:09.408334 2120908 ubuntu.go:177] setting up certificates
	I0916 10:56:09.408354 2120908 provision.go:84] configureAuth start
	I0916 10:56:09.408445 2120908 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m02
	I0916 10:56:09.429295 2120908 provision.go:143] copyHostCerts
	I0916 10:56:09.429335 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:56:09.429367 2120908 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:56:09.429375 2120908 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:56:09.429473 2120908 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:56:09.429559 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:56:09.429575 2120908 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:56:09.429579 2120908 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:56:09.429606 2120908 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:56:09.429652 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:56:09.429667 2120908 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:56:09.429671 2120908 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:56:09.429697 2120908 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:56:09.429789 2120908 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.ha-234759-m02 san=[127.0.0.1 192.168.49.3 ha-234759-m02 localhost minikube]
	I0916 10:56:09.745068 2120908 provision.go:177] copyRemoteCerts
	I0916 10:56:09.745222 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:56:09.745300 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:56:09.767656 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40627 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:56:09.872324 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:56:09.872400 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:56:09.905781 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:56:09.905894 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:56:09.935922 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:56:09.935988 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:56:09.974222 2120908 provision.go:87] duration metric: took 565.826225ms to configureAuth
	I0916 10:56:09.974254 2120908 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:56:09.974506 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:56:09.974536 2120908 machine.go:96] duration metric: took 4.217490031s to provisionDockerMachine
	I0916 10:56:09.974549 2120908 start.go:293] postStartSetup for "ha-234759-m02" (driver="docker")
	I0916 10:56:09.974560 2120908 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:56:09.974618 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:56:09.974665 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:56:10.014627 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40627 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:56:10.149908 2120908 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:56:10.161060 2120908 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:56:10.161105 2120908 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:56:10.161123 2120908 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:56:10.161130 2120908 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:56:10.161141 2120908 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:56:10.161206 2120908 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:56:10.161301 2120908 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:56:10.161318 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:56:10.161427 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:56:10.185870 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:56:10.240030 2120908 start.go:296] duration metric: took 265.450702ms for postStartSetup
	I0916 10:56:10.240195 2120908 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:56:10.240254 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:56:10.280325 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40627 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:56:10.395219 2120908 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:56:10.401492 2120908 fix.go:56] duration metric: took 5.00745236s for fixHost
	I0916 10:56:10.401514 2120908 start.go:83] releasing machines lock for "ha-234759-m02", held for 5.007506538s
	I0916 10:56:10.401591 2120908 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m02
	I0916 10:56:10.433089 2120908 out.go:177] * Found network options:
	I0916 10:56:10.435552 2120908 out.go:177]   - NO_PROXY=192.168.49.2
	W0916 10:56:10.438186 2120908 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:56:10.438239 2120908 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:56:10.438317 2120908 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:56:10.438374 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:56:10.438646 2120908 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:56:10.438744 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:56:10.490130 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40627 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:56:10.498921 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40627 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:56:10.620640 2120908 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:56:10.841874 2120908 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:56:10.841961 2120908 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:56:10.868679 2120908 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:56:10.868722 2120908 start.go:495] detecting cgroup driver to use...
	I0916 10:56:10.868758 2120908 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:56:10.868822 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:56:10.925484 2120908 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:56:10.968415 2120908 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:56:10.968520 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:56:11.013184 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:56:11.032943 2120908 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:56:11.334608 2120908 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:56:11.628139 2120908 docker.go:233] disabling docker service ...
	I0916 10:56:11.628257 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:56:11.665345 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:56:11.715171 2120908 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:56:12.056762 2120908 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:56:12.296630 2120908 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:56:12.331454 2120908 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:56:12.367113 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:56:12.385702 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:56:12.412051 2120908 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:56:12.412167 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:56:12.433510 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:56:12.452273 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:56:12.486202 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:56:12.509468 2120908 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:56:12.537220 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:56:12.555347 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:56:12.572426 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:56:12.592137 2120908 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:56:12.608101 2120908 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:56:12.621598 2120908 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:56:12.780829 2120908 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:56:13.656778 2120908 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:56:13.656908 2120908 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:56:13.671332 2120908 start.go:563] Will wait 60s for crictl version
	I0916 10:56:13.671454 2120908 ssh_runner.go:195] Run: which crictl
	I0916 10:56:13.679813 2120908 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:56:13.827499 2120908 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:56:13.827605 2120908 ssh_runner.go:195] Run: containerd --version
	I0916 10:56:13.910866 2120908 ssh_runner.go:195] Run: containerd --version
	I0916 10:56:13.990669 2120908 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:56:13.993882 2120908 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:56:13.996601 2120908 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:56:14.027045 2120908 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:56:14.030827 2120908 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:56:14.066417 2120908 mustload.go:65] Loading cluster: ha-234759
	I0916 10:56:14.066692 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:56:14.066975 2120908 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:56:14.088642 2120908 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:56:14.088948 2120908 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759 for IP: 192.168.49.3
	I0916 10:56:14.088962 2120908 certs.go:194] generating shared ca certs ...
	I0916 10:56:14.088978 2120908 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:56:14.089093 2120908 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:56:14.089147 2120908 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:56:14.089160 2120908 certs.go:256] generating profile certs ...
	I0916 10:56:14.089237 2120908 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key
	I0916 10:56:14.089314 2120908 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.0b38f7a6
	I0916 10:56:14.089360 2120908 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key
	I0916 10:56:14.089373 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:56:14.089387 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:56:14.089403 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:56:14.089422 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:56:14.089437 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:56:14.089456 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:56:14.089470 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:56:14.089485 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:56:14.089539 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:56:14.089572 2120908 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:56:14.089584 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:56:14.089608 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:56:14.089638 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:56:14.089664 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:56:14.089721 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:56:14.089758 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:56:14.089798 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:56:14.089812 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:56:14.089874 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:56:14.107702 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40622 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:56:14.202988 2120908 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 10:56:14.207264 2120908 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 10:56:14.252686 2120908 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 10:56:14.262380 2120908 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1679 bytes)
	I0916 10:56:14.305693 2120908 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 10:56:14.318039 2120908 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 10:56:14.383785 2120908 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 10:56:14.399149 2120908 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1679 bytes)
	I0916 10:56:14.433207 2120908 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 10:56:14.460816 2120908 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 10:56:14.507114 2120908 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 10:56:14.512228 2120908 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1675 bytes)
	I0916 10:56:14.547418 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:56:14.640619 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:56:14.715737 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:56:14.875363 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:56:15.007224 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1440 bytes)
	I0916 10:56:15.100956 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:56:15.223785 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:56:15.356166 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 10:56:15.465853 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:56:15.575339 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:56:15.680158 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:56:15.746352 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 10:56:15.818164 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1679 bytes)
	I0916 10:56:15.876139 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 10:56:15.988572 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1679 bytes)
	I0916 10:56:16.092849 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 10:56:16.168261 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1675 bytes)
	I0916 10:56:16.276906 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0916 10:56:16.304639 2120908 ssh_runner.go:195] Run: openssl version
	I0916 10:56:16.311382 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:56:16.322440 2120908 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:56:16.327621 2120908 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:56:16.327706 2120908 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:56:16.361834 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:56:16.403901 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:56:16.433514 2120908 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:56:16.452569 2120908 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:56:16.452668 2120908 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:56:16.467885 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:56:16.496481 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:56:16.540337 2120908 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:56:16.552870 2120908 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:56:16.552989 2120908 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:56:16.575343 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:56:16.607219 2120908 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:56:16.624765 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:56:16.647106 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:56:16.670959 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:56:16.692036 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:56:16.719339 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:56:16.743232 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:56:16.763768 2120908 kubeadm.go:934] updating node {m02 192.168.49.3 8443 v1.31.1 containerd true true} ...
	I0916 10:56:16.763899 2120908 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-234759-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:56:16.763931 2120908 kube-vip.go:115] generating kube-vip config ...
	I0916 10:56:16.763995 2120908 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:56:16.808017 2120908 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:56:16.808112 2120908 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
	I0916 10:56:16.808188 2120908 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:56:16.848657 2120908 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:56:16.848742 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 10:56:16.869272 2120908 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:56:16.915916 2120908 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:56:16.969228 2120908 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:56:17.012529 2120908 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:56:17.027319 2120908 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:56:17.046154 2120908 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:56:17.244416 2120908 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:56:17.263351 2120908 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:56:17.263857 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:56:17.267777 2120908 out.go:177] * Verifying Kubernetes components...
	I0916 10:56:17.269940 2120908 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:56:17.488739 2120908 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:56:17.519457 2120908 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:56:17.519765 2120908 kapi.go:59] client config for ha-234759: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]strin
g(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:56:17.519831 2120908 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:56:17.520053 2120908 node_ready.go:35] waiting up to 6m0s for node "ha-234759-m02" to be "Ready" ...
	I0916 10:56:17.520138 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:56:17.520150 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:17.520159 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:17.520163 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.201656 2120908 round_trippers.go:574] Response Status: 200 OK in 1681 milliseconds
	I0916 10:56:19.202990 2120908 node_ready.go:49] node "ha-234759-m02" has status "Ready":"True"
	I0916 10:56:19.203022 2120908 node_ready.go:38] duration metric: took 1.68294974s for node "ha-234759-m02" to be "Ready" ...
	I0916 10:56:19.203033 2120908 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:56:19.203110 2120908 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 10:56:19.203129 2120908 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 10:56:19.203198 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:56:19.203208 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:19.203217 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.203222 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:19.227735 2120908 round_trippers.go:574] Response Status: 200 OK in 24 milliseconds
	I0916 10:56:19.241048 2120908 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:19.241259 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:19.241287 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:19.241306 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.241322 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:19.256472 2120908 round_trippers.go:574] Response Status: 200 OK in 15 milliseconds
	I0916 10:56:19.257181 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:19.257196 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:19.257206 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:19.257210 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.267349 2120908 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:56:19.268607 2120908 pod_ready.go:93] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:19.268660 2120908 pod_ready.go:82] duration metric: took 27.462073ms for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:19.268674 2120908 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:19.268760 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vqj8q
	I0916 10:56:19.268765 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:19.268772 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.268776 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:19.282874 2120908 round_trippers.go:574] Response Status: 200 OK in 14 milliseconds
	I0916 10:56:19.283996 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:19.284053 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:19.284078 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.284093 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:19.288378 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:56:19.288948 2120908 pod_ready.go:93] pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:19.288993 2120908 pod_ready.go:82] duration metric: took 20.311266ms for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:19.289020 2120908 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:19.289161 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759
	I0916 10:56:19.289190 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:19.289210 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.289229 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:19.293603 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:56:19.294385 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:19.294434 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:19.294459 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.294477 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:19.303749 2120908 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:56:19.304953 2120908 pod_ready.go:93] pod "etcd-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:19.305011 2120908 pod_ready.go:82] duration metric: took 15.970411ms for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:19.305037 2120908 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:19.305138 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:56:19.305173 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:19.305196 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.305213 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:19.309075 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:19.310073 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:56:19.310133 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:19.310157 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.310176 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:19.319209 2120908 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:56:19.319871 2120908 pod_ready.go:93] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:19.320001 2120908 pod_ready.go:82] duration metric: took 14.886447ms for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:19.320043 2120908 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:19.320181 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:56:19.320227 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:19.320258 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.320295 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:19.331506 2120908 round_trippers.go:574] Response Status: 200 OK in 11 milliseconds
	I0916 10:56:19.404272 2120908 request.go:632] Waited for 71.150908ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:56:19.404417 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:56:19.404431 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:19.404440 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.404444 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:19.409076 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:56:19.410200 2120908 pod_ready.go:93] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:19.410269 2120908 pod_ready.go:82] duration metric: took 90.19285ms for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:19.410334 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:19.603784 2120908 request.go:632] Waited for 193.331128ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759
	I0916 10:56:19.603864 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759
	I0916 10:56:19.603877 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:19.603886 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.603892 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:19.607106 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:19.803294 2120908 request.go:632] Waited for 195.238718ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:19.803353 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:19.803360 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:19.803374 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:19.803382 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:19.808195 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:56:19.809167 2120908 pod_ready.go:93] pod "kube-apiserver-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:19.809190 2120908 pod_ready.go:82] duration metric: took 398.82807ms for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:19.809201 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:20.004093 2120908 request.go:632] Waited for 194.821375ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:56:20.004165 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:56:20.004176 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:20.004186 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:20.004194 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:20.012141 2120908 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:56:20.203875 2120908 request.go:632] Waited for 190.34713ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:56:20.203935 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:56:20.203941 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:20.203950 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:20.203964 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:20.208923 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:56:20.210659 2120908 pod_ready.go:93] pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:20.210714 2120908 pod_ready.go:82] duration metric: took 401.50478ms for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:20.210727 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:20.404172 2120908 request.go:632] Waited for 193.345437ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m03
	I0916 10:56:20.404231 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m03
	I0916 10:56:20.404239 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:20.404248 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:20.404256 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:20.414022 2120908 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:56:20.603306 2120908 request.go:632] Waited for 188.265149ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:56:20.603370 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:56:20.603377 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:20.603386 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:20.603392 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:20.615671 2120908 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 10:56:20.616698 2120908 pod_ready.go:93] pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:20.616747 2120908 pod_ready.go:82] duration metric: took 406.012329ms for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:20.616787 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:20.804041 2120908 request.go:632] Waited for 187.172166ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:56:20.804111 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:56:20.804123 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:20.804132 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:20.804137 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:20.807601 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:21.008139 2120908 request.go:632] Waited for 199.636344ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:21.008370 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:21.008409 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:21.008432 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:21.008441 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:21.011979 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:21.012630 2120908 pod_ready.go:93] pod "kube-controller-manager-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:21.012652 2120908 pod_ready.go:82] duration metric: took 395.841152ms for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:21.012682 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:21.204074 2120908 request.go:632] Waited for 191.284814ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:56:21.204148 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:56:21.204158 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:21.204167 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:21.204172 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:21.207898 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:21.403931 2120908 request.go:632] Waited for 194.324065ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:56:21.404012 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:56:21.404021 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:21.404049 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:21.404063 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:21.407588 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:21.408198 2120908 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:21.408220 2120908 pod_ready.go:82] duration metric: took 395.528571ms for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:21.408232 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:21.603533 2120908 request.go:632] Waited for 195.198275ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:56:21.603631 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:56:21.603644 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:21.603653 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:21.603657 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:21.607092 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:21.804210 2120908 request.go:632] Waited for 196.322674ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:56:21.804265 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:56:21.804303 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:21.804341 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:21.804349 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:21.807905 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:21.808797 2120908 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:21.808819 2120908 pod_ready.go:82] duration metric: took 400.546295ms for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:21.808831 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:22.004346 2120908 request.go:632] Waited for 195.420436ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:56:22.004423 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:56:22.004429 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:22.004443 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:22.004447 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:22.008958 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:56:22.203451 2120908 request.go:632] Waited for 193.246681ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:56:22.203564 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:56:22.203581 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:22.203590 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:22.203596 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:22.208144 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:56:22.208739 2120908 pod_ready.go:93] pod "kube-proxy-f4jm2" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:22.208760 2120908 pod_ready.go:82] duration metric: took 399.905635ms for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:22.208772 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:22.403775 2120908 request.go:632] Waited for 194.907118ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:56:22.403877 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:56:22.403899 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:22.403922 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:22.403938 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:22.406886 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:22.603340 2120908 request.go:632] Waited for 195.414667ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:22.603440 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:22.603452 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:22.603462 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:22.603472 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:22.606827 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:22.607437 2120908 pod_ready.go:93] pod "kube-proxy-gwdl4" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:22.607467 2120908 pod_ready.go:82] duration metric: took 398.687722ms for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:22.607480 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-m84xg" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:22.803478 2120908 request.go:632] Waited for 195.922684ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:56:22.803543 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:56:22.803552 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:22.803561 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:22.803566 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:22.806743 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:23.005420 2120908 request.go:632] Waited for 197.642928ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:56:23.005495 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:56:23.005502 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:23.005511 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:23.005517 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:23.013821 2120908 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:56:23.014376 2120908 pod_ready.go:93] pod "kube-proxy-m84xg" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:23.014395 2120908 pod_ready.go:82] duration metric: took 406.903704ms for pod "kube-proxy-m84xg" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:23.014407 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:23.203824 2120908 request.go:632] Waited for 189.343222ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:56:23.203895 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:56:23.203912 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:23.203921 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:23.203931 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:23.217328 2120908 round_trippers.go:574] Response Status: 200 OK in 13 milliseconds
	I0916 10:56:23.404165 2120908 request.go:632] Waited for 185.271838ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:56:23.404233 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:56:23.404244 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:23.404254 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:23.404259 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:23.415479 2120908 round_trippers.go:574] Response Status: 200 OK in 11 milliseconds
	I0916 10:56:23.417034 2120908 pod_ready.go:93] pod "kube-proxy-qrdxc" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:23.417061 2120908 pod_ready.go:82] duration metric: took 402.645901ms for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:23.417074 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:23.604278 2120908 request.go:632] Waited for 187.131092ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:56:23.604348 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:56:23.604360 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:23.604370 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:23.604376 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:23.607975 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:23.804013 2120908 request.go:632] Waited for 195.334488ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:23.804071 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:23.804080 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:23.804089 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:23.804098 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:23.807504 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:23.808051 2120908 pod_ready.go:93] pod "kube-scheduler-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:23.808070 2120908 pod_ready.go:82] duration metric: took 390.986938ms for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:23.808081 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:24.006391 2120908 request.go:632] Waited for 198.227786ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:56:24.006513 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:56:24.006530 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:24.006540 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:24.006561 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:24.011448 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:56:24.203559 2120908 request.go:632] Waited for 191.168581ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:56:24.203661 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:56:24.203697 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:24.203713 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:24.203720 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:24.206888 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:24.207457 2120908 pod_ready.go:93] pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:24.207478 2120908 pod_ready.go:82] duration metric: took 399.386393ms for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:24.207490 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:24.403312 2120908 request.go:632] Waited for 195.749451ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:56:24.403427 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:56:24.403437 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:24.403455 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:24.403461 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:24.408421 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:56:24.603742 2120908 request.go:632] Waited for 194.303503ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:56:24.603802 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:56:24.603814 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:24.603823 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:24.603832 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:24.607304 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:24.607893 2120908 pod_ready.go:93] pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:56:24.607911 2120908 pod_ready.go:82] duration metric: took 400.412257ms for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:24.607956 2120908 pod_ready.go:39] duration metric: took 5.40491079s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:56:24.607972 2120908 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:56:24.608045 2120908 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:56:24.619223 2120908 api_server.go:72] duration metric: took 7.355776152s to wait for apiserver process to appear ...
	I0916 10:56:24.619249 2120908 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:56:24.619272 2120908 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:56:24.629692 2120908 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:56:24.629808 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 10:56:24.629821 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:24.629830 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:24.629836 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:24.630877 2120908 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:56:24.630989 2120908 api_server.go:141] control plane version: v1.31.1
	I0916 10:56:24.631003 2120908 api_server.go:131] duration metric: took 11.746888ms to wait for apiserver health ...
	I0916 10:56:24.631010 2120908 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:56:24.803343 2120908 request.go:632] Waited for 172.244884ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:56:24.803407 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:56:24.803417 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:24.803425 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:24.803432 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:24.809378 2120908 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:56:24.819745 2120908 system_pods.go:59] 26 kube-system pods found
	I0916 10:56:24.819787 2120908 system_pods.go:61] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:56:24.819794 2120908 system_pods.go:61] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:56:24.819798 2120908 system_pods.go:61] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:56:24.819802 2120908 system_pods.go:61] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:56:24.819807 2120908 system_pods.go:61] "etcd-ha-234759-m03" [701fa3e0-9014-4f92-9734-6b708cb56aa0] Running
	I0916 10:56:24.819811 2120908 system_pods.go:61] "kindnet-jhkc5" [59ea0a34-9a2b-44f2-901f-2bcc13b91a1b] Running
	I0916 10:56:24.819815 2120908 system_pods.go:61] "kindnet-lwtj4" [581ce31f-8039-42fe-a2a8-d64ec858cd32] Running
	I0916 10:56:24.819819 2120908 system_pods.go:61] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:56:24.819826 2120908 system_pods.go:61] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running / Ready:ContainersNotReady (containers with unready status: [kindnet-cni]) / ContainersReady:ContainersNotReady (containers with unready status: [kindnet-cni])
	I0916 10:56:24.819837 2120908 system_pods.go:61] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:56:24.819851 2120908 system_pods.go:61] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:56:24.819856 2120908 system_pods.go:61] "kube-apiserver-ha-234759-m03" [136cdbc8-ac04-4ca9-85a2-17ff91df20c0] Running
	I0916 10:56:24.819861 2120908 system_pods.go:61] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:56:24.819870 2120908 system_pods.go:61] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running / Ready:ContainersNotReady (containers with unready status: [kube-controller-manager]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-controller-manager])
	I0916 10:56:24.819874 2120908 system_pods.go:61] "kube-controller-manager-ha-234759-m03" [61a9f46c-7889-49f1-afe7-1bbce98f0b5a] Running
	I0916 10:56:24.819882 2120908 system_pods.go:61] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running / Ready:ContainersNotReady (containers with unready status: [kube-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-proxy])
	I0916 10:56:24.819890 2120908 system_pods.go:61] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:56:24.819895 2120908 system_pods.go:61] "kube-proxy-m84xg" [6b84ade0-ae34-4c7e-b1ed-e9c83f4bf130] Running
	I0916 10:56:24.819899 2120908 system_pods.go:61] "kube-proxy-qrdxc" [d08c1deb-9b33-4ec1-b15e-6dce465c00d9] Running
	I0916 10:56:24.819908 2120908 system_pods.go:61] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:56:24.819912 2120908 system_pods.go:61] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:56:24.819916 2120908 system_pods.go:61] "kube-scheduler-ha-234759-m03" [deab7491-330f-496f-985f-3b1882eeeb60] Running
	I0916 10:56:24.819919 2120908 system_pods.go:61] "kube-vip-ha-234759" [41a1ec5f-e3ae-4b06-9a3f-f76b54f7d24e] Running
	I0916 10:56:24.819923 2120908 system_pods.go:61] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:56:24.819927 2120908 system_pods.go:61] "kube-vip-ha-234759-m03" [44f3fbf3-8803-4d2f-884c-470cdc81e835] Running
	I0916 10:56:24.819934 2120908 system_pods.go:61] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:56:24.819940 2120908 system_pods.go:74] duration metric: took 188.924402ms to wait for pod list to return data ...
	I0916 10:56:24.819952 2120908 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:56:25.004270 2120908 request.go:632] Waited for 184.226709ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:56:25.004409 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:56:25.004444 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:25.004474 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:25.004491 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:25.013504 2120908 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:56:25.014656 2120908 default_sa.go:45] found service account: "default"
	I0916 10:56:25.014742 2120908 default_sa.go:55] duration metric: took 194.78231ms for default service account to be created ...
	I0916 10:56:25.014755 2120908 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:56:25.203253 2120908 request.go:632] Waited for 188.401772ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:56:25.203374 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:56:25.203387 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:25.203396 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:25.203407 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:25.209295 2120908 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:56:25.233918 2120908 system_pods.go:86] 26 kube-system pods found
	I0916 10:56:25.234018 2120908 system_pods.go:89] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:56:25.234050 2120908 system_pods.go:89] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:56:25.234059 2120908 system_pods.go:89] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:56:25.234068 2120908 system_pods.go:89] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:56:25.234073 2120908 system_pods.go:89] "etcd-ha-234759-m03" [701fa3e0-9014-4f92-9734-6b708cb56aa0] Running
	I0916 10:56:25.234078 2120908 system_pods.go:89] "kindnet-jhkc5" [59ea0a34-9a2b-44f2-901f-2bcc13b91a1b] Running
	I0916 10:56:25.234082 2120908 system_pods.go:89] "kindnet-lwtj4" [581ce31f-8039-42fe-a2a8-d64ec858cd32] Running
	I0916 10:56:25.234112 2120908 system_pods.go:89] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:56:25.234129 2120908 system_pods.go:89] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running / Ready:ContainersNotReady (containers with unready status: [kindnet-cni]) / ContainersReady:ContainersNotReady (containers with unready status: [kindnet-cni])
	I0916 10:56:25.234145 2120908 system_pods.go:89] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:56:25.234152 2120908 system_pods.go:89] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:56:25.234160 2120908 system_pods.go:89] "kube-apiserver-ha-234759-m03" [136cdbc8-ac04-4ca9-85a2-17ff91df20c0] Running
	I0916 10:56:25.234168 2120908 system_pods.go:89] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:56:25.234219 2120908 system_pods.go:89] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running / Ready:ContainersNotReady (containers with unready status: [kube-controller-manager]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-controller-manager])
	I0916 10:56:25.234229 2120908 system_pods.go:89] "kube-controller-manager-ha-234759-m03" [61a9f46c-7889-49f1-afe7-1bbce98f0b5a] Running
	I0916 10:56:25.234244 2120908 system_pods.go:89] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running / Ready:ContainersNotReady (containers with unready status: [kube-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-proxy])
	I0916 10:56:25.234263 2120908 system_pods.go:89] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:56:25.234268 2120908 system_pods.go:89] "kube-proxy-m84xg" [6b84ade0-ae34-4c7e-b1ed-e9c83f4bf130] Running
	I0916 10:56:25.234277 2120908 system_pods.go:89] "kube-proxy-qrdxc" [d08c1deb-9b33-4ec1-b15e-6dce465c00d9] Running
	I0916 10:56:25.234283 2120908 system_pods.go:89] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:56:25.234301 2120908 system_pods.go:89] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:56:25.234306 2120908 system_pods.go:89] "kube-scheduler-ha-234759-m03" [deab7491-330f-496f-985f-3b1882eeeb60] Running
	I0916 10:56:25.234311 2120908 system_pods.go:89] "kube-vip-ha-234759" [41a1ec5f-e3ae-4b06-9a3f-f76b54f7d24e] Running
	I0916 10:56:25.234324 2120908 system_pods.go:89] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:56:25.234328 2120908 system_pods.go:89] "kube-vip-ha-234759-m03" [44f3fbf3-8803-4d2f-884c-470cdc81e835] Running
	I0916 10:56:25.234332 2120908 system_pods.go:89] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:56:25.234372 2120908 system_pods.go:126] duration metric: took 219.610201ms to wait for k8s-apps to be running ...
	I0916 10:56:25.234396 2120908 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:56:25.234482 2120908 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:56:25.247845 2120908 system_svc.go:56] duration metric: took 13.4404ms WaitForService to wait for kubelet
	I0916 10:56:25.247885 2120908 kubeadm.go:582] duration metric: took 7.984443167s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:56:25.247905 2120908 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:56:25.404266 2120908 request.go:632] Waited for 156.238165ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:56:25.404322 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:56:25.404328 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:25.404338 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:25.404347 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:25.408199 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:25.409732 2120908 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:56:25.409769 2120908 node_conditions.go:123] node cpu capacity is 2
	I0916 10:56:25.409781 2120908 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:56:25.409806 2120908 node_conditions.go:123] node cpu capacity is 2
	I0916 10:56:25.409818 2120908 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:56:25.409824 2120908 node_conditions.go:123] node cpu capacity is 2
	I0916 10:56:25.409829 2120908 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:56:25.409833 2120908 node_conditions.go:123] node cpu capacity is 2
	I0916 10:56:25.409839 2120908 node_conditions.go:105] duration metric: took 161.912164ms to run NodePressure ...
	I0916 10:56:25.409861 2120908 start.go:241] waiting for startup goroutines ...
	I0916 10:56:25.409893 2120908 start.go:255] writing updated cluster config ...
	I0916 10:56:25.413360 2120908 out.go:201] 
	I0916 10:56:25.416133 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:56:25.416265 2120908 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:56:25.419269 2120908 out.go:177] * Starting "ha-234759-m03" control-plane node in "ha-234759" cluster
	I0916 10:56:25.422757 2120908 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:56:25.425519 2120908 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:56:25.428330 2120908 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:56:25.428371 2120908 cache.go:56] Caching tarball of preloaded images
	I0916 10:56:25.428444 2120908 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:56:25.428483 2120908 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:56:25.428495 2120908 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:56:25.428640 2120908 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	W0916 10:56:25.458734 2120908 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:56:25.458757 2120908 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:56:25.458847 2120908 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:56:25.458870 2120908 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:56:25.458877 2120908 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:56:25.458886 2120908 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:56:25.458891 2120908 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:56:25.460243 2120908 image.go:273] response: 
	I0916 10:56:25.578515 2120908 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:56:25.578558 2120908 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:56:25.578590 2120908 start.go:360] acquireMachinesLock for ha-234759-m03: {Name:mk5869e6facf3d1797569b1a88c6d42d2b487fed Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:56:25.578667 2120908 start.go:364] duration metric: took 55.787µs to acquireMachinesLock for "ha-234759-m03"
	I0916 10:56:25.578749 2120908 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:56:25.578772 2120908 fix.go:54] fixHost starting: m03
	I0916 10:56:25.579096 2120908 cli_runner.go:164] Run: docker container inspect ha-234759-m03 --format={{.State.Status}}
	I0916 10:56:25.594503 2120908 fix.go:112] recreateIfNeeded on ha-234759-m03: state=Stopped err=<nil>
	W0916 10:56:25.594531 2120908 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:56:25.597589 2120908 out.go:177] * Restarting existing docker container for "ha-234759-m03" ...
	I0916 10:56:25.599998 2120908 cli_runner.go:164] Run: docker start ha-234759-m03
	I0916 10:56:25.921615 2120908 cli_runner.go:164] Run: docker container inspect ha-234759-m03 --format={{.State.Status}}
	I0916 10:56:25.947173 2120908 kic.go:430] container "ha-234759-m03" state is running.
	I0916 10:56:25.947556 2120908 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m03
	I0916 10:56:25.979180 2120908 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:56:25.979435 2120908 machine.go:93] provisionDockerMachine start ...
	I0916 10:56:25.979611 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:56:26.000252 2120908 main.go:141] libmachine: Using SSH client type: native
	I0916 10:56:26.000621 2120908 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40632 <nil> <nil>}
	I0916 10:56:26.000637 2120908 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:56:26.008398 2120908 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 10:56:29.211853 2120908 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m03
	
	I0916 10:56:29.211952 2120908 ubuntu.go:169] provisioning hostname "ha-234759-m03"
	I0916 10:56:29.212055 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:56:29.244395 2120908 main.go:141] libmachine: Using SSH client type: native
	I0916 10:56:29.244650 2120908 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40632 <nil> <nil>}
	I0916 10:56:29.244663 2120908 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-234759-m03 && echo "ha-234759-m03" | sudo tee /etc/hostname
	I0916 10:56:29.460952 2120908 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m03
	
	I0916 10:56:29.461118 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:56:29.486202 2120908 main.go:141] libmachine: Using SSH client type: native
	I0916 10:56:29.486441 2120908 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40632 <nil> <nil>}
	I0916 10:56:29.486458 2120908 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-234759-m03' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-234759-m03/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-234759-m03' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:56:29.763573 2120908 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:56:29.763667 2120908 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:56:29.763719 2120908 ubuntu.go:177] setting up certificates
	I0916 10:56:29.763747 2120908 provision.go:84] configureAuth start
	I0916 10:56:29.763833 2120908 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m03
	I0916 10:56:29.786133 2120908 provision.go:143] copyHostCerts
	I0916 10:56:29.786182 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:56:29.786216 2120908 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:56:29.786223 2120908 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:56:29.786300 2120908 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:56:29.786384 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:56:29.786400 2120908 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:56:29.786405 2120908 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:56:29.786429 2120908 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:56:29.786468 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:56:29.786483 2120908 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:56:29.786487 2120908 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:56:29.786513 2120908 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:56:29.786559 2120908 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.ha-234759-m03 san=[127.0.0.1 192.168.49.4 ha-234759-m03 localhost minikube]
	I0916 10:56:30.055022 2120908 provision.go:177] copyRemoteCerts
	I0916 10:56:30.055131 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:56:30.055198 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:56:30.089527 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40632 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:56:30.226632 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:56:30.226724 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:56:30.305722 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:56:30.305801 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:56:30.373363 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:56:30.373439 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:56:30.431260 2120908 provision.go:87] duration metric: took 667.486893ms to configureAuth
	I0916 10:56:30.431298 2120908 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:56:30.431568 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:56:30.431582 2120908 machine.go:96] duration metric: took 4.452139303s to provisionDockerMachine
	I0916 10:56:30.431597 2120908 start.go:293] postStartSetup for "ha-234759-m03" (driver="docker")
	I0916 10:56:30.431614 2120908 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:56:30.431686 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:56:30.431738 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:56:30.454591 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40632 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:56:30.562177 2120908 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:56:30.568829 2120908 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:56:30.568868 2120908 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:56:30.568879 2120908 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:56:30.568885 2120908 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:56:30.568896 2120908 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:56:30.568958 2120908 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:56:30.569040 2120908 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:56:30.569051 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:56:30.569158 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:56:30.583173 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:56:30.617002 2120908 start.go:296] duration metric: took 185.373269ms for postStartSetup
	I0916 10:56:30.617114 2120908 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:56:30.617172 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:56:30.640104 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40632 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:56:30.791942 2120908 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:56:30.806561 2120908 fix.go:56] duration metric: took 5.227766913s for fixHost
	I0916 10:56:30.806597 2120908 start.go:83] releasing machines lock for "ha-234759-m03", held for 5.227862715s
	I0916 10:56:30.806827 2120908 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m03
	I0916 10:56:30.848787 2120908 out.go:177] * Found network options:
	I0916 10:56:30.851446 2120908 out.go:177]   - NO_PROXY=192.168.49.2,192.168.49.3
	W0916 10:56:30.854197 2120908 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:56:30.854233 2120908 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:56:30.854257 2120908 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:56:30.854267 2120908 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:56:30.854348 2120908 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:56:30.854405 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:56:30.854661 2120908 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:56:30.854745 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:56:30.902847 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40632 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:56:30.905561 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40632 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:56:31.049322 2120908 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:56:31.291172 2120908 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:56:31.291274 2120908 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:56:31.307745 2120908 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:56:31.307775 2120908 start.go:495] detecting cgroup driver to use...
	I0916 10:56:31.307818 2120908 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:56:31.307886 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:56:31.329393 2120908 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:56:31.350653 2120908 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:56:31.350755 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:56:31.431158 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:56:31.463192 2120908 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:56:31.859531 2120908 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:56:32.320254 2120908 docker.go:233] disabling docker service ...
	I0916 10:56:32.320410 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:56:32.377116 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:56:32.453428 2120908 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:56:32.804071 2120908 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:56:33.142033 2120908 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:56:33.190425 2120908 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:56:33.261040 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:56:33.298509 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:56:33.345520 2120908 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:56:33.345646 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:56:33.382222 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:56:33.453897 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:56:33.519424 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:56:33.559733 2120908 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:56:33.607399 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:56:33.689895 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:56:33.724438 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:56:33.759121 2120908 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:56:33.795768 2120908 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:56:33.845839 2120908 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:56:34.231650 2120908 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:56:35.104474 2120908 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:56:35.104561 2120908 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:56:35.122399 2120908 start.go:563] Will wait 60s for crictl version
	I0916 10:56:35.122470 2120908 ssh_runner.go:195] Run: which crictl
	I0916 10:56:35.167785 2120908 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:56:35.314161 2120908 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:56:35.314228 2120908 ssh_runner.go:195] Run: containerd --version
	I0916 10:56:35.401608 2120908 ssh_runner.go:195] Run: containerd --version
	I0916 10:56:35.514649 2120908 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:56:35.517628 2120908 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:56:35.520393 2120908 out.go:177]   - env NO_PROXY=192.168.49.2,192.168.49.3
	I0916 10:56:35.523101 2120908 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:56:35.550137 2120908 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:56:35.554408 2120908 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:56:35.609594 2120908 mustload.go:65] Loading cluster: ha-234759
	I0916 10:56:35.609856 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:56:35.610123 2120908 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:56:35.648344 2120908 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:56:35.648619 2120908 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759 for IP: 192.168.49.4
	I0916 10:56:35.648627 2120908 certs.go:194] generating shared ca certs ...
	I0916 10:56:35.648641 2120908 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:56:35.648750 2120908 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:56:35.648793 2120908 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:56:35.648800 2120908 certs.go:256] generating profile certs ...
	I0916 10:56:35.648878 2120908 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key
	I0916 10:56:35.648941 2120908 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.20d6ef76
	I0916 10:56:35.648984 2120908 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key
	I0916 10:56:35.648993 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:56:35.649005 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:56:35.649017 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:56:35.649027 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:56:35.649038 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:56:35.649049 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:56:35.649060 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:56:35.649071 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:56:35.649119 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:56:35.649149 2120908 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:56:35.649157 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:56:35.649219 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:56:35.649255 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:56:35.649280 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:56:35.649328 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:56:35.649362 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:56:35.649376 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:56:35.649393 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:56:35.649457 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:56:35.691293 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40622 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:56:35.795056 2120908 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 10:56:35.800729 2120908 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 10:56:35.835793 2120908 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 10:56:35.844218 2120908 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1679 bytes)
	I0916 10:56:35.865153 2120908 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 10:56:35.874936 2120908 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 10:56:35.905567 2120908 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 10:56:35.909991 2120908 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1679 bytes)
	I0916 10:56:35.940985 2120908 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 10:56:35.949403 2120908 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 10:56:35.996082 2120908 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 10:56:36.012540 2120908 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1675 bytes)
	I0916 10:56:36.047931 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:56:36.108068 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:56:36.136133 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:56:36.173045 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:56:36.201927 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1440 bytes)
	I0916 10:56:36.231713 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:56:36.274308 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:56:36.349805 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 10:56:36.439976 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:56:36.510164 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:56:36.557844 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:56:36.610734 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 10:56:36.645790 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1679 bytes)
	I0916 10:56:36.686263 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 10:56:36.733880 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1679 bytes)
	I0916 10:56:36.802880 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 10:56:36.863208 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1675 bytes)
	I0916 10:56:36.914977 2120908 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0916 10:56:36.990141 2120908 ssh_runner.go:195] Run: openssl version
	I0916 10:56:37.004416 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:56:37.019307 2120908 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:56:37.025398 2120908 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:56:37.025491 2120908 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:56:37.036373 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:56:37.049186 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:56:37.061148 2120908 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:56:37.066104 2120908 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:56:37.066190 2120908 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:56:37.074197 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:56:37.093026 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:56:37.112119 2120908 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:56:37.119067 2120908 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:56:37.119143 2120908 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:56:37.133485 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:56:37.156328 2120908 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:56:37.167272 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:56:37.184580 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:56:37.199380 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:56:37.210973 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:56:37.225213 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:56:37.237160 2120908 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:56:37.255961 2120908 kubeadm.go:934] updating node {m03 192.168.49.4 8443 v1.31.1 containerd true true} ...
	I0916 10:56:37.256084 2120908 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-234759-m03 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.4
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:56:37.256117 2120908 kube-vip.go:115] generating kube-vip config ...
	I0916 10:56:37.256180 2120908 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:56:37.278815 2120908 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:56:37.278898 2120908 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
	I0916 10:56:37.278970 2120908 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:56:37.308232 2120908 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:56:37.308319 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 10:56:37.334765 2120908 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:56:37.374414 2120908 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:56:37.413034 2120908 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:56:37.467479 2120908 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:56:37.473644 2120908 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:56:37.485890 2120908 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:56:37.673914 2120908 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:56:37.693323 2120908 start.go:235] Will wait 6m0s for node &{Name:m03 IP:192.168.49.4 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:56:37.693792 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:56:37.695421 2120908 out.go:177] * Verifying Kubernetes components...
	I0916 10:56:37.697146 2120908 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:56:37.867221 2120908 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:56:37.883954 2120908 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:56:37.884262 2120908 kapi.go:59] client config for ha-234759: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]strin
g(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:56:37.884343 2120908 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:56:37.884576 2120908 node_ready.go:35] waiting up to 6m0s for node "ha-234759-m03" to be "Ready" ...
	I0916 10:56:37.884672 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:56:37.884683 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:37.884692 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:37.884705 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:37.887929 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:37.889029 2120908 node_ready.go:49] node "ha-234759-m03" has status "Ready":"True"
	I0916 10:56:37.889085 2120908 node_ready.go:38] duration metric: took 4.487726ms for node "ha-234759-m03" to be "Ready" ...
	I0916 10:56:37.889111 2120908 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:56:37.889213 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:56:37.889241 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:37.889261 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:37.889277 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:37.896397 2120908 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:56:37.909071 2120908 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:56:37.909240 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:37.909268 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:37.909292 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:37.909310 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:37.912766 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:37.914135 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:37.914152 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:37.914161 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:37.914165 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:37.917676 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:38.410037 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:38.410058 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:38.410067 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:38.410071 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:38.413596 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:38.414978 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:38.415003 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:38.415019 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:38.415024 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:38.421063 2120908 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:56:38.909370 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:38.909403 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:38.909414 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:38.909418 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:38.915770 2120908 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:56:38.918349 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:38.918382 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:38.918393 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:38.918400 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:38.928422 2120908 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:56:39.409392 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:39.409417 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:39.409427 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:39.409432 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:39.413706 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:56:39.414457 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:39.414475 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:39.414484 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:39.414490 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:39.417553 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:39.910336 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:39.910358 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:39.910367 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:39.910371 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:39.913891 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:39.915070 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:39.915095 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:39.915106 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:39.915112 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:39.918159 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:39.918771 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:56:40.410046 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:40.410070 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:40.410079 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:40.410085 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:40.413480 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:40.414647 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:40.414774 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:40.414792 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:40.414797 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:40.417845 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:40.909412 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:40.909433 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:40.909442 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:40.909448 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:40.913099 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:40.913898 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:40.913919 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:40.913929 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:40.913935 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:40.917072 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:41.410132 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:41.410155 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:41.410165 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:41.410170 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:41.413429 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:41.414370 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:41.414390 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:41.414400 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:41.414405 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:41.417840 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:41.909757 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:41.909784 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:41.909794 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:41.909799 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:41.913233 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:41.914031 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:41.914051 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:41.914060 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:41.914069 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:41.917179 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:42.409812 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:42.409835 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:42.409845 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:42.409850 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:42.415268 2120908 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:56:42.416083 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:42.416106 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:42.416117 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:42.416126 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:42.418960 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:42.419812 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:56:42.909717 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:42.909746 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:42.909756 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:42.909760 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:42.913167 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:42.914013 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:42.914033 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:42.914043 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:42.914048 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:42.917141 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:43.410185 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:43.410214 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:43.410223 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:43.410228 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:43.413488 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:43.414283 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:43.414304 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:43.414313 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:43.414321 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:43.417217 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:43.910012 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:43.910039 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:43.910049 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:43.910054 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:43.913541 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:43.914329 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:43.914348 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:43.914357 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:43.914364 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:43.917471 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:44.409749 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:44.409775 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:44.409785 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:44.409789 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:44.413028 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:44.414124 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:44.414145 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:44.414155 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:44.414161 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:44.417147 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:44.909799 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:44.909916 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:44.909932 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:44.909938 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:44.913213 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:44.913983 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:44.914004 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:44.914014 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:44.914018 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:44.917180 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:44.917800 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:56:45.410275 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:45.410297 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:45.410336 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:45.410345 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:45.413796 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:45.414636 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:45.414658 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:45.414668 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:45.414725 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:45.417847 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:45.909377 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:45.909398 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:45.909408 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:45.909413 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:45.912901 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:45.913740 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:45.913757 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:45.913766 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:45.913771 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:45.916670 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:46.409873 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:46.409897 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:46.409907 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:46.409914 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:46.416794 2120908 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:56:46.420577 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:46.420603 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:46.420612 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:46.420617 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:46.424383 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:46.910325 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:46.910350 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:46.910360 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:46.910366 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:46.914705 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:56:46.916027 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:46.916099 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:46.916122 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:46.916142 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:46.920422 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:56:46.921600 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:56:47.410270 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:47.410290 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:47.410300 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:47.410303 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:47.422817 2120908 round_trippers.go:574] Response Status: 200 OK in 12 milliseconds
	I0916 10:56:47.425488 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:47.425558 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:47.425583 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:47.425600 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:47.439579 2120908 round_trippers.go:574] Response Status: 200 OK in 13 milliseconds
	I0916 10:56:47.909925 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:47.909989 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:47.910013 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:47.910030 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:47.918489 2120908 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:56:47.924192 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:47.924263 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:47.924285 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:47.924302 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:47.929302 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:56:48.409485 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:48.409523 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:48.409534 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:48.409538 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:48.413141 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:48.413901 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:48.413922 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:48.413933 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:48.413938 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:48.417070 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:48.909381 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:48.909406 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:48.909417 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:48.909433 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:48.912864 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:48.913829 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:48.913850 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:48.913860 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:48.913865 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:48.916747 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:49.409928 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:49.410007 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:49.410030 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:49.410083 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:49.414050 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:49.415102 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:49.415156 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:49.415179 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:49.415197 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:49.418103 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:49.418802 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:56:49.910117 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:49.910140 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:49.910149 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:49.910155 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:49.913730 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:49.914605 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:49.914626 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:49.914636 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:49.914642 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:49.917446 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:50.410281 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:50.410305 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:50.410314 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:50.410319 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:50.413598 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:50.414416 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:50.414470 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:50.414485 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:50.414491 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:50.417493 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:50.909414 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:50.909437 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:50.909451 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:50.909455 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:50.912705 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:50.913747 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:50.913767 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:50.913777 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:50.913783 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:50.916747 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:51.410089 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:51.410113 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:51.410123 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:51.410128 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:51.413720 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:51.414595 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:51.414613 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:51.414623 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:51.414629 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:51.417734 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:51.910051 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:51.910072 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:51.910082 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:51.910086 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:51.913585 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:51.914644 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:51.914667 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:51.914717 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:51.914724 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:51.917401 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:51.918124 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:56:52.409757 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:52.409781 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:52.409791 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:52.409796 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:52.416569 2120908 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:56:52.417521 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:52.417544 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:52.417554 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:52.417559 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:52.421044 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:52.909889 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:52.909959 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:52.909974 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:52.909979 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:52.913835 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:52.914608 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:52.914631 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:52.914641 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:52.914646 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:52.917685 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:53.410061 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:53.410082 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:53.410093 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:53.410097 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:53.413605 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:53.414433 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:53.414450 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:53.414460 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:53.414464 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:53.417535 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:53.909885 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:53.909910 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:53.909919 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:53.909923 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:53.913683 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:53.914576 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:53.914594 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:53.914603 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:53.914608 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:53.917681 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:53.918395 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:56:54.409411 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:54.409437 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:54.409447 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:54.409470 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:54.412853 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:54.413732 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:54.413753 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:54.413762 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:54.413765 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:54.416926 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:54.909962 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:54.909984 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:54.909994 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:54.910000 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:54.913309 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:54.914272 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:54.914292 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:54.914301 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:54.914306 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:54.917117 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:55.410295 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:55.410319 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:55.410329 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:55.410333 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:55.413644 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:55.414639 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:55.414659 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:55.414668 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:55.414714 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:55.417759 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:55.909968 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:55.909987 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:55.909997 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:55.910003 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:55.913321 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:55.914084 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:55.914102 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:55.914112 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:55.914115 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:55.916985 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:56.409801 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:56.409836 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:56.409849 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:56.409853 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:56.413106 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:56.414005 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:56.414023 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:56.414032 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:56.414036 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:56.417037 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:56.417850 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:56:56.909771 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:56.909792 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:56.909801 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:56.909805 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:56.913369 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:56.914204 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:56.914222 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:56.914229 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:56.914233 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:56.917271 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:57.409605 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:57.409631 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:57.409641 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:57.409645 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:57.413007 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:57.414015 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:57.414034 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:57.414044 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:57.414049 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:57.417235 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:57.909754 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:57.909778 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:57.909788 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:57.909793 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:57.913256 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:57.914035 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:57.914055 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:57.914064 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:57.914069 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:57.916858 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:56:58.409974 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:58.409995 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:58.410004 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:58.410008 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:58.413574 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:58.414497 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:58.414518 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:58.414528 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:58.414533 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:58.417892 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:58.418954 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:56:58.910164 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:58.910185 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:58.910195 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:58.910199 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:58.913553 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:58.914340 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:58.914359 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:58.914368 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:58.914374 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:58.917481 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:59.409474 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:59.409497 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:59.409507 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:59.409511 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:59.413194 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:59.414099 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:59.414122 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:59.414130 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:59.414134 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:59.417205 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:59.910352 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:56:59.910376 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:59.910386 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:59.910396 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:59.914297 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:56:59.915156 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:56:59.915179 2120908 round_trippers.go:469] Request Headers:
	I0916 10:56:59.915188 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:56:59.915192 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:56:59.918455 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:00.409445 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:00.409466 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:00.409477 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:00.409481 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:00.449260 2120908 round_trippers.go:574] Response Status: 200 OK in 39 milliseconds
	I0916 10:57:00.450702 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:00.450722 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:00.450732 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:00.450735 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:00.472495 2120908 round_trippers.go:574] Response Status: 200 OK in 21 milliseconds
	I0916 10:57:00.473840 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:57:00.909399 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:00.909424 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:00.909435 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:00.909441 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:00.912707 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:00.913848 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:00.913870 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:00.913878 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:00.913884 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:00.917083 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:01.409999 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:01.410073 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:01.410097 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:01.410115 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:01.413769 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:01.414794 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:01.414818 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:01.414827 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:01.414832 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:01.417655 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:01.909358 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:01.909387 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:01.909399 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:01.909403 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:01.912844 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:01.913733 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:01.913754 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:01.913763 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:01.913767 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:01.916844 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:02.410143 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:02.410167 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:02.410177 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:02.410183 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:02.413871 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:02.414812 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:02.414835 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:02.414844 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:02.414851 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:02.418039 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:02.910199 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:02.910224 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:02.910234 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:02.910242 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:02.913795 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:02.914879 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:02.914901 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:02.914911 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:02.914918 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:02.917801 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:02.918285 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:57:03.410012 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:03.410033 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:03.410042 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:03.410048 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:03.413534 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:03.414305 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:03.414350 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:03.414366 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:03.414371 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:03.417288 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:03.909428 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:03.909496 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:03.909521 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:03.909543 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:03.912950 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:03.913842 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:03.913861 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:03.913870 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:03.913874 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:03.917343 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:04.409716 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:04.409739 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:04.409750 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:04.409755 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:04.413132 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:04.414215 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:04.414238 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:04.414248 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:04.414251 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:04.418435 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:04.910191 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:04.910212 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:04.910221 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:04.910226 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:04.913788 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:04.914476 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:04.914488 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:04.914497 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:04.914502 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:04.917593 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:05.409317 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:05.409340 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:05.409350 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:05.409355 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:05.412563 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:05.413437 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:05.413468 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:05.413478 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:05.413483 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:05.416527 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:05.417063 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:57:05.909532 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:05.909565 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:05.909583 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:05.909614 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:05.912903 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:05.913734 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:05.913753 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:05.913762 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:05.913769 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:05.916727 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:06.409466 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:06.409492 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:06.409502 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:06.409506 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:06.412855 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:06.413914 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:06.413937 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:06.413960 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:06.413964 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:06.417041 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:06.910041 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:06.910064 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:06.910073 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:06.910079 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:06.913627 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:06.914405 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:06.914417 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:06.914428 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:06.914431 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:06.917468 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:07.409389 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:07.409409 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:07.409418 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:07.409422 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:07.412715 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:07.413503 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:07.413523 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:07.413533 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:07.413554 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:07.416555 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:07.417386 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:57:07.909371 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:07.909392 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:07.909402 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:07.909406 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:07.913074 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:07.913786 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:07.913799 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:07.913808 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:07.913812 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:07.917098 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:08.409867 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:08.409892 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:08.409902 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:08.409908 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:08.413345 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:08.414327 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:08.414350 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:08.414359 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:08.414363 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:08.417460 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:08.909797 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:08.909818 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:08.909825 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:08.909829 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:08.913145 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:08.914172 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:08.914192 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:08.914201 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:08.914205 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:08.918037 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:09.409843 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:09.409866 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:09.409875 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:09.409880 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:09.413216 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:09.413880 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:09.413895 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:09.413904 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:09.413909 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:09.416488 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:09.909907 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:09.909936 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:09.909944 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:09.909952 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:09.913135 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:09.913903 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:09.913921 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:09.913936 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:09.913943 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:09.916688 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:09.917198 2120908 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:57:10.409887 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:10.409910 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:10.409924 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:10.409929 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:10.413250 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:10.414157 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:10.414177 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:10.414186 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:10.414191 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:10.417120 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:10.909372 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:10.909396 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:10.909405 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:10.909411 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:10.912783 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:10.913549 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:10.913561 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:10.913571 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:10.913575 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:10.916708 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:11.410205 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:11.410224 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:11.410234 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:11.410239 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:11.413509 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:11.414434 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:11.414459 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:11.414468 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:11.414472 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:11.417259 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:11.910055 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:11.910122 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:11.910161 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:11.910183 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:11.949083 2120908 round_trippers.go:574] Response Status: 200 OK in 38 milliseconds
	I0916 10:57:11.954007 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:11.954088 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:11.954111 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:11.954129 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:11.975167 2120908 round_trippers.go:574] Response Status: 200 OK in 20 milliseconds
	I0916 10:57:11.977322 2120908 pod_ready.go:93] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:11.977397 2120908 pod_ready.go:82] duration metric: took 34.068244952s for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:11.977425 2120908 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:11.977528 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vqj8q
	I0916 10:57:11.977554 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:11.977583 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:11.977611 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:11.991230 2120908 round_trippers.go:574] Response Status: 200 OK in 13 milliseconds
	I0916 10:57:11.992387 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:11.992459 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:11.992481 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:11.992500 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:11.995857 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:11.996783 2120908 pod_ready.go:93] pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:11.996845 2120908 pod_ready.go:82] duration metric: took 19.399781ms for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:11.996871 2120908 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:11.996965 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759
	I0916 10:57:11.996990 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:11.997027 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:11.997048 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:12.004692 2120908 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:57:12.006588 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:12.006781 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:12.006821 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:12.006845 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:12.014756 2120908 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:57:12.015887 2120908 pod_ready.go:93] pod "etcd-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:12.015967 2120908 pod_ready.go:82] duration metric: took 19.075467ms for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:12.015995 2120908 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:12.016104 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:57:12.016131 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:12.016153 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:12.016170 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:12.021142 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:12.022410 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:12.022499 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:12.022527 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:12.022547 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:12.027755 2120908 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:57:12.028855 2120908 pod_ready.go:93] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:12.028935 2120908 pod_ready.go:82] duration metric: took 12.920708ms for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:12.028966 2120908 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:12.029067 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:57:12.029094 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:12.029117 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:12.029136 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:12.035997 2120908 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:57:12.037211 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:12.037287 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:12.037310 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:12.037330 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:12.041019 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:12.042098 2120908 pod_ready.go:93] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:12.042172 2120908 pod_ready.go:82] duration metric: took 13.171389ms for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:12.042214 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:12.110541 2120908 request.go:632] Waited for 68.216256ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759
	I0916 10:57:12.110633 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759
	I0916 10:57:12.110640 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:12.110652 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:12.110656 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:12.115729 2120908 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:57:12.310056 2120908 request.go:632] Waited for 193.246378ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:12.310130 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:12.310137 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:12.310146 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:12.310155 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:12.313493 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:12.314306 2120908 pod_ready.go:93] pod "kube-apiserver-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:12.314366 2120908 pod_ready.go:82] duration metric: took 272.118429ms for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:12.314385 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:12.510989 2120908 request.go:632] Waited for 196.534637ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:57:12.511115 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:57:12.511159 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:12.511177 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:12.511182 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:12.514570 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:12.710904 2120908 request.go:632] Waited for 195.338459ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:12.710959 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:12.710965 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:12.710980 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:12.710986 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:12.714568 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:12.715121 2120908 pod_ready.go:93] pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:12.715141 2120908 pod_ready.go:82] duration metric: took 400.747191ms for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:12.715153 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:12.910534 2120908 request.go:632] Waited for 195.315009ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m03
	I0916 10:57:12.910622 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m03
	I0916 10:57:12.910632 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:12.910641 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:12.910647 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:12.914248 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:13.110418 2120908 request.go:632] Waited for 195.326816ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:13.110531 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:13.110544 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:13.110573 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:13.110586 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:13.113915 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:13.114514 2120908 pod_ready.go:93] pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:13.114538 2120908 pod_ready.go:82] duration metric: took 399.377681ms for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:13.114550 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:13.310963 2120908 request.go:632] Waited for 196.319418ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:57:13.311029 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:57:13.311039 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:13.311048 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:13.311061 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:13.314296 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:13.510431 2120908 request.go:632] Waited for 195.28548ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:13.510504 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:13.510515 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:13.510524 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:13.510536 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:13.513920 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:13.514955 2120908 pod_ready.go:93] pod "kube-controller-manager-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:13.514989 2120908 pod_ready.go:82] duration metric: took 400.424756ms for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:13.515001 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:13.710878 2120908 request.go:632] Waited for 195.781878ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:57:13.710942 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:57:13.710952 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:13.710965 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:13.710970 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:13.714228 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:13.910401 2120908 request.go:632] Waited for 195.326776ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:13.910466 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:13.910475 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:13.910496 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:13.910502 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:13.914662 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:13.915762 2120908 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:13.915785 2120908 pod_ready.go:82] duration metric: took 400.776393ms for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:13.915798 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:14.110422 2120908 request.go:632] Waited for 194.541525ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:57:14.110481 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:57:14.110487 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:14.110496 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:14.110507 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:14.114311 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:14.310398 2120908 request.go:632] Waited for 195.373397ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:14.310470 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:14.310480 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:14.310496 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:14.310513 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:14.317479 2120908 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:57:14.318473 2120908 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:14.318503 2120908 pod_ready.go:82] duration metric: took 402.696192ms for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:14.318532 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:14.510515 2120908 request.go:632] Waited for 191.901894ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:57:14.510624 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:57:14.510632 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:14.510641 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:14.510646 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:14.514087 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:14.710502 2120908 request.go:632] Waited for 195.348691ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:14.710636 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:14.710653 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:14.710664 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:14.710668 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:14.714424 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:14.715428 2120908 pod_ready.go:93] pod "kube-proxy-f4jm2" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:14.715456 2120908 pod_ready.go:82] duration metric: took 396.90971ms for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:14.715468 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:14.910605 2120908 request.go:632] Waited for 195.066568ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:57:14.910713 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:57:14.910728 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:14.910737 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:14.910742 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:14.914201 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:15.110144 2120908 request.go:632] Waited for 195.248704ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:15.110243 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:15.110252 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:15.110318 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:15.110331 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:15.115288 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:15.116519 2120908 pod_ready.go:93] pod "kube-proxy-gwdl4" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:15.116546 2120908 pod_ready.go:82] duration metric: took 401.069199ms for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:15.116560 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-m84xg" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:15.310198 2120908 request.go:632] Waited for 193.537543ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:57:15.310260 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:57:15.310267 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:15.310276 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:15.310285 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:15.315784 2120908 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:57:15.510829 2120908 request.go:632] Waited for 194.283484ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:15.510890 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:15.510896 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:15.510905 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:15.510910 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:15.515275 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:15.515866 2120908 pod_ready.go:98] node "ha-234759-m04" hosting pod "kube-proxy-m84xg" in "kube-system" namespace is currently not "Ready" (skipping!): node "ha-234759-m04" has status "Ready":"Unknown"
	I0916 10:57:15.515885 2120908 pod_ready.go:82] duration metric: took 399.318228ms for pod "kube-proxy-m84xg" in "kube-system" namespace to be "Ready" ...
	E0916 10:57:15.515911 2120908 pod_ready.go:67] WaitExtra: waitPodCondition: node "ha-234759-m04" hosting pod "kube-proxy-m84xg" in "kube-system" namespace is currently not "Ready" (skipping!): node "ha-234759-m04" has status "Ready":"Unknown"
	I0916 10:57:15.515919 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:15.710120 2120908 request.go:632] Waited for 194.135078ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:57:15.710176 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:57:15.710186 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:15.710195 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:15.710203 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:15.713435 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:15.910774 2120908 request.go:632] Waited for 196.403069ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:15.910835 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:15.910841 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:15.910850 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:15.910860 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:15.915444 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:15.916348 2120908 pod_ready.go:93] pod "kube-proxy-qrdxc" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:15.916370 2120908 pod_ready.go:82] duration metric: took 400.443488ms for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:15.916381 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:16.110385 2120908 request.go:632] Waited for 193.915486ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:57:16.110464 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:57:16.110473 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:16.110499 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:16.110504 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:16.120823 2120908 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:57:16.310176 2120908 request.go:632] Waited for 188.252087ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:16.310254 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:16.310267 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:16.310276 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:16.310280 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:16.313227 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:16.314090 2120908 pod_ready.go:93] pod "kube-scheduler-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:16.314112 2120908 pod_ready.go:82] duration metric: took 397.707186ms for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:16.314125 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:16.511018 2120908 request.go:632] Waited for 196.806775ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:57:16.511098 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:57:16.511109 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:16.511118 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:16.511127 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:16.514402 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:16.710429 2120908 request.go:632] Waited for 195.324101ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:16.710484 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:16.710490 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:16.710499 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:16.710508 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:16.713838 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:16.714531 2120908 pod_ready.go:93] pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:16.714553 2120908 pod_ready.go:82] duration metric: took 400.402389ms for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:16.714565 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:16.911019 2120908 request.go:632] Waited for 196.367114ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:57:16.911095 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:57:16.911141 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:16.911156 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:16.911163 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:16.914254 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:17.110385 2120908 request.go:632] Waited for 195.360039ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:17.110552 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:17.110564 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:17.110574 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:17.110580 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:17.114140 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:17.115150 2120908 pod_ready.go:93] pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:17.115175 2120908 pod_ready.go:82] duration metric: took 400.581612ms for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:17.115188 2120908 pod_ready.go:39] duration metric: took 39.226055092s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:57:17.115203 2120908 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:57:17.115270 2120908 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:57:17.130234 2120908 api_server.go:72] duration metric: took 39.436696988s to wait for apiserver process to appear ...
	I0916 10:57:17.130302 2120908 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:57:17.130332 2120908 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:57:17.139237 2120908 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:57:17.139391 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 10:57:17.139406 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:17.139416 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:17.139420 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:17.140810 2120908 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 10:57:17.140896 2120908 api_server.go:141] control plane version: v1.31.1
	I0916 10:57:17.140916 2120908 api_server.go:131] duration metric: took 10.599787ms to wait for apiserver health ...
	I0916 10:57:17.140925 2120908 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:57:17.310226 2120908 request.go:632] Waited for 169.217514ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:57:17.310301 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:57:17.310312 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:17.310321 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:17.310326 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:17.316921 2120908 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:57:17.328067 2120908 system_pods.go:59] 26 kube-system pods found
	I0916 10:57:17.328100 2120908 system_pods.go:61] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:57:17.328107 2120908 system_pods.go:61] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:57:17.328112 2120908 system_pods.go:61] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:57:17.328117 2120908 system_pods.go:61] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:57:17.328141 2120908 system_pods.go:61] "etcd-ha-234759-m03" [701fa3e0-9014-4f92-9734-6b708cb56aa0] Running
	I0916 10:57:17.328156 2120908 system_pods.go:61] "kindnet-jhkc5" [59ea0a34-9a2b-44f2-901f-2bcc13b91a1b] Running
	I0916 10:57:17.328161 2120908 system_pods.go:61] "kindnet-lwtj4" [581ce31f-8039-42fe-a2a8-d64ec858cd32] Running
	I0916 10:57:17.328165 2120908 system_pods.go:61] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:57:17.328176 2120908 system_pods.go:61] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running
	I0916 10:57:17.328180 2120908 system_pods.go:61] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:57:17.328184 2120908 system_pods.go:61] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:57:17.328189 2120908 system_pods.go:61] "kube-apiserver-ha-234759-m03" [136cdbc8-ac04-4ca9-85a2-17ff91df20c0] Running
	I0916 10:57:17.328198 2120908 system_pods.go:61] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:57:17.328203 2120908 system_pods.go:61] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running
	I0916 10:57:17.328227 2120908 system_pods.go:61] "kube-controller-manager-ha-234759-m03" [61a9f46c-7889-49f1-afe7-1bbce98f0b5a] Running
	I0916 10:57:17.328238 2120908 system_pods.go:61] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running
	I0916 10:57:17.328242 2120908 system_pods.go:61] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:57:17.328245 2120908 system_pods.go:61] "kube-proxy-m84xg" [6b84ade0-ae34-4c7e-b1ed-e9c83f4bf130] Running
	I0916 10:57:17.328261 2120908 system_pods.go:61] "kube-proxy-qrdxc" [d08c1deb-9b33-4ec1-b15e-6dce465c00d9] Running
	I0916 10:57:17.328273 2120908 system_pods.go:61] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:57:17.328278 2120908 system_pods.go:61] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:57:17.328283 2120908 system_pods.go:61] "kube-scheduler-ha-234759-m03" [deab7491-330f-496f-985f-3b1882eeeb60] Running
	I0916 10:57:17.328293 2120908 system_pods.go:61] "kube-vip-ha-234759" [7dcc7fb0-91ad-4d60-a7fd-3c49deeb0ac0] Running
	I0916 10:57:17.328297 2120908 system_pods.go:61] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:57:17.328300 2120908 system_pods.go:61] "kube-vip-ha-234759-m03" [44f3fbf3-8803-4d2f-884c-470cdc81e835] Running
	I0916 10:57:17.328304 2120908 system_pods.go:61] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:57:17.328310 2120908 system_pods.go:74] duration metric: took 187.376844ms to wait for pod list to return data ...
	I0916 10:57:17.328323 2120908 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:57:17.510997 2120908 request.go:632] Waited for 182.575643ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:57:17.511059 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:57:17.511071 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:17.511080 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:17.511094 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:17.514812 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:17.514943 2120908 default_sa.go:45] found service account: "default"
	I0916 10:57:17.514962 2120908 default_sa.go:55] duration metric: took 186.617357ms for default service account to be created ...
	I0916 10:57:17.514973 2120908 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:57:17.710375 2120908 request.go:632] Waited for 195.335875ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:57:17.710468 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:57:17.710481 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:17.710490 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:17.710499 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:17.716706 2120908 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:57:17.735995 2120908 system_pods.go:86] 26 kube-system pods found
	I0916 10:57:17.736033 2120908 system_pods.go:89] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:57:17.736042 2120908 system_pods.go:89] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:57:17.736048 2120908 system_pods.go:89] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:57:17.736079 2120908 system_pods.go:89] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:57:17.736092 2120908 system_pods.go:89] "etcd-ha-234759-m03" [701fa3e0-9014-4f92-9734-6b708cb56aa0] Running
	I0916 10:57:17.736104 2120908 system_pods.go:89] "kindnet-jhkc5" [59ea0a34-9a2b-44f2-901f-2bcc13b91a1b] Running
	I0916 10:57:17.736115 2120908 system_pods.go:89] "kindnet-lwtj4" [581ce31f-8039-42fe-a2a8-d64ec858cd32] Running
	I0916 10:57:17.736125 2120908 system_pods.go:89] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:57:17.736134 2120908 system_pods.go:89] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running
	I0916 10:57:17.736154 2120908 system_pods.go:89] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:57:17.736165 2120908 system_pods.go:89] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:57:17.736179 2120908 system_pods.go:89] "kube-apiserver-ha-234759-m03" [136cdbc8-ac04-4ca9-85a2-17ff91df20c0] Running
	I0916 10:57:17.736195 2120908 system_pods.go:89] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:57:17.736205 2120908 system_pods.go:89] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running
	I0916 10:57:17.736211 2120908 system_pods.go:89] "kube-controller-manager-ha-234759-m03" [61a9f46c-7889-49f1-afe7-1bbce98f0b5a] Running
	I0916 10:57:17.736216 2120908 system_pods.go:89] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running
	I0916 10:57:17.736229 2120908 system_pods.go:89] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:57:17.736234 2120908 system_pods.go:89] "kube-proxy-m84xg" [6b84ade0-ae34-4c7e-b1ed-e9c83f4bf130] Running
	I0916 10:57:17.736243 2120908 system_pods.go:89] "kube-proxy-qrdxc" [d08c1deb-9b33-4ec1-b15e-6dce465c00d9] Running
	I0916 10:57:17.736259 2120908 system_pods.go:89] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:57:17.736269 2120908 system_pods.go:89] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:57:17.736288 2120908 system_pods.go:89] "kube-scheduler-ha-234759-m03" [deab7491-330f-496f-985f-3b1882eeeb60] Running
	I0916 10:57:17.736299 2120908 system_pods.go:89] "kube-vip-ha-234759" [7dcc7fb0-91ad-4d60-a7fd-3c49deeb0ac0] Running
	I0916 10:57:17.736305 2120908 system_pods.go:89] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:57:17.736309 2120908 system_pods.go:89] "kube-vip-ha-234759-m03" [44f3fbf3-8803-4d2f-884c-470cdc81e835] Running
	I0916 10:57:17.736313 2120908 system_pods.go:89] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:57:17.736329 2120908 system_pods.go:126] duration metric: took 221.346126ms to wait for k8s-apps to be running ...
	I0916 10:57:17.736341 2120908 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:57:17.736432 2120908 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:57:17.749671 2120908 system_svc.go:56] duration metric: took 13.319146ms WaitForService to wait for kubelet
	I0916 10:57:17.749703 2120908 kubeadm.go:582] duration metric: took 40.056171186s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:57:17.749741 2120908 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:57:17.910112 2120908 request.go:632] Waited for 160.277755ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:57:17.910179 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:57:17.910193 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:17.910208 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:17.910214 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:17.914415 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:17.915916 2120908 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:57:17.915946 2120908 node_conditions.go:123] node cpu capacity is 2
	I0916 10:57:17.915957 2120908 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:57:17.915963 2120908 node_conditions.go:123] node cpu capacity is 2
	I0916 10:57:17.915967 2120908 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:57:17.916000 2120908 node_conditions.go:123] node cpu capacity is 2
	I0916 10:57:17.916010 2120908 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:57:17.916015 2120908 node_conditions.go:123] node cpu capacity is 2
	I0916 10:57:17.916020 2120908 node_conditions.go:105] duration metric: took 166.267133ms to run NodePressure ...
	I0916 10:57:17.916032 2120908 start.go:241] waiting for startup goroutines ...
	I0916 10:57:17.916064 2120908 start.go:255] writing updated cluster config ...
	I0916 10:57:17.919561 2120908 out.go:201] 
	I0916 10:57:17.922532 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:57:17.922660 2120908 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:57:17.925838 2120908 out.go:177] * Starting "ha-234759-m04" worker node in "ha-234759" cluster
	I0916 10:57:17.928721 2120908 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:57:17.931811 2120908 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:57:17.934281 2120908 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:57:17.934320 2120908 cache.go:56] Caching tarball of preloaded images
	I0916 10:57:17.934366 2120908 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:57:17.934447 2120908 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:57:17.934459 2120908 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:57:17.934634 2120908 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	W0916 10:57:17.964651 2120908 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:57:17.964673 2120908 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:57:17.964763 2120908 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:57:17.964782 2120908 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:57:17.964787 2120908 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:57:17.964794 2120908 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:57:17.964800 2120908 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:57:17.966398 2120908 image.go:273] response: 
	I0916 10:57:18.095679 2120908 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:57:18.095725 2120908 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:57:18.095769 2120908 start.go:360] acquireMachinesLock for ha-234759-m04: {Name:mk1736d6dcf4c8e0380a733a9a6df0f9a97ba403 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:57:18.095843 2120908 start.go:364] duration metric: took 50.125µs to acquireMachinesLock for "ha-234759-m04"
	I0916 10:57:18.095876 2120908 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:57:18.095887 2120908 fix.go:54] fixHost starting: m04
	I0916 10:57:18.096191 2120908 cli_runner.go:164] Run: docker container inspect ha-234759-m04 --format={{.State.Status}}
	I0916 10:57:18.117353 2120908 fix.go:112] recreateIfNeeded on ha-234759-m04: state=Stopped err=<nil>
	W0916 10:57:18.117379 2120908 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:57:18.120400 2120908 out.go:177] * Restarting existing docker container for "ha-234759-m04" ...
	I0916 10:57:18.122981 2120908 cli_runner.go:164] Run: docker start ha-234759-m04
	I0916 10:57:18.532837 2120908 cli_runner.go:164] Run: docker container inspect ha-234759-m04 --format={{.State.Status}}
	I0916 10:57:18.557439 2120908 kic.go:430] container "ha-234759-m04" state is running.
	I0916 10:57:18.557995 2120908 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m04
	I0916 10:57:18.588865 2120908 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:57:18.590996 2120908 machine.go:93] provisionDockerMachine start ...
	I0916 10:57:18.591081 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:57:18.613339 2120908 main.go:141] libmachine: Using SSH client type: native
	I0916 10:57:18.613573 2120908 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40637 <nil> <nil>}
	I0916 10:57:18.613583 2120908 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:57:18.614920 2120908 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:42738->127.0.0.1:40637: read: connection reset by peer
	I0916 10:57:21.759588 2120908 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m04
	
	I0916 10:57:21.759617 2120908 ubuntu.go:169] provisioning hostname "ha-234759-m04"
	I0916 10:57:21.759695 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:57:21.778965 2120908 main.go:141] libmachine: Using SSH client type: native
	I0916 10:57:21.779208 2120908 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40637 <nil> <nil>}
	I0916 10:57:21.779223 2120908 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-234759-m04 && echo "ha-234759-m04" | sudo tee /etc/hostname
	I0916 10:57:21.962836 2120908 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m04
	
	I0916 10:57:21.962929 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:57:21.984213 2120908 main.go:141] libmachine: Using SSH client type: native
	I0916 10:57:21.984462 2120908 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40637 <nil> <nil>}
	I0916 10:57:21.984485 2120908 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-234759-m04' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-234759-m04/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-234759-m04' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:57:22.135148 2120908 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:57:22.135198 2120908 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:57:22.135225 2120908 ubuntu.go:177] setting up certificates
	I0916 10:57:22.135241 2120908 provision.go:84] configureAuth start
	I0916 10:57:22.135310 2120908 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m04
	I0916 10:57:22.153467 2120908 provision.go:143] copyHostCerts
	I0916 10:57:22.153512 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:57:22.153545 2120908 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:57:22.153557 2120908 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:57:22.153631 2120908 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:57:22.153717 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:57:22.153740 2120908 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:57:22.153748 2120908 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:57:22.153776 2120908 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:57:22.153824 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:57:22.153846 2120908 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:57:22.153855 2120908 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:57:22.153882 2120908 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:57:22.153944 2120908 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.ha-234759-m04 san=[127.0.0.1 192.168.49.5 ha-234759-m04 localhost minikube]
	I0916 10:57:22.829241 2120908 provision.go:177] copyRemoteCerts
	I0916 10:57:22.829309 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:57:22.829355 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:57:22.856893 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40637 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m04/id_rsa Username:docker}
	I0916 10:57:22.965208 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:57:22.965271 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:57:22.995853 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:57:22.995933 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:57:23.026726 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:57:23.026792 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:57:23.054303 2120908 provision.go:87] duration metric: took 919.043797ms to configureAuth
	I0916 10:57:23.054330 2120908 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:57:23.054581 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:57:23.054593 2120908 machine.go:96] duration metric: took 4.463582852s to provisionDockerMachine
	I0916 10:57:23.054601 2120908 start.go:293] postStartSetup for "ha-234759-m04" (driver="docker")
	I0916 10:57:23.054612 2120908 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:57:23.054667 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:57:23.054758 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:57:23.076267 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40637 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m04/id_rsa Username:docker}
	I0916 10:57:23.178262 2120908 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:57:23.183381 2120908 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:57:23.183421 2120908 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:57:23.183432 2120908 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:57:23.183439 2120908 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:57:23.183450 2120908 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:57:23.183512 2120908 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:57:23.183602 2120908 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:57:23.183617 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:57:23.183721 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:57:23.193745 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:57:23.232269 2120908 start.go:296] duration metric: took 177.650431ms for postStartSetup
	I0916 10:57:23.232358 2120908 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:57:23.232401 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:57:23.255220 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40637 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m04/id_rsa Username:docker}
	I0916 10:57:23.353715 2120908 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:57:23.359295 2120908 fix.go:56] duration metric: took 5.263400659s for fixHost
	I0916 10:57:23.359323 2120908 start.go:83] releasing machines lock for "ha-234759-m04", held for 5.263460482s
	I0916 10:57:23.359399 2120908 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m04
	I0916 10:57:23.398256 2120908 out.go:177] * Found network options:
	I0916 10:57:23.400933 2120908 out.go:177]   - NO_PROXY=192.168.49.2,192.168.49.3,192.168.49.4
	W0916 10:57:23.403481 2120908 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:57:23.403517 2120908 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:57:23.403529 2120908 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:57:23.403553 2120908 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:57:23.403563 2120908 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:57:23.403576 2120908 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:57:23.403652 2120908 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:57:23.403696 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:57:23.403961 2120908 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:57:23.404016 2120908 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:57:23.430280 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40637 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m04/id_rsa Username:docker}
	I0916 10:57:23.439826 2120908 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40637 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m04/id_rsa Username:docker}
	I0916 10:57:23.547411 2120908 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:57:23.701153 2120908 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:57:23.701236 2120908 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:57:23.721993 2120908 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:57:23.722021 2120908 start.go:495] detecting cgroup driver to use...
	I0916 10:57:23.722056 2120908 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:57:23.722119 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:57:23.740439 2120908 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:57:23.754065 2120908 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:57:23.754137 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:57:23.770443 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:57:23.783979 2120908 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:57:23.953730 2120908 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:57:24.084090 2120908 docker.go:233] disabling docker service ...
	I0916 10:57:24.084214 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:57:24.099388 2120908 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:57:24.114783 2120908 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:57:24.220209 2120908 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:57:24.328853 2120908 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:57:24.344437 2120908 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:57:24.378961 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:57:24.393055 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:57:24.409347 2120908 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:57:24.409465 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:57:24.422383 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:57:24.434369 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:57:24.445242 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:57:24.457156 2120908 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:57:24.469789 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:57:24.483090 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:57:24.495464 2120908 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:57:24.512147 2120908 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:57:24.527327 2120908 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:57:24.537382 2120908 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:57:24.626908 2120908 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:57:24.830647 2120908 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:57:24.830770 2120908 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:57:24.835863 2120908 start.go:563] Will wait 60s for crictl version
	I0916 10:57:24.835950 2120908 ssh_runner.go:195] Run: which crictl
	I0916 10:57:24.840327 2120908 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:57:24.900790 2120908 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:57:24.900885 2120908 ssh_runner.go:195] Run: containerd --version
	I0916 10:57:24.936606 2120908 ssh_runner.go:195] Run: containerd --version
	I0916 10:57:24.968621 2120908 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:57:24.971388 2120908 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:57:24.973949 2120908 out.go:177]   - env NO_PROXY=192.168.49.2,192.168.49.3
	I0916 10:57:24.976611 2120908 out.go:177]   - env NO_PROXY=192.168.49.2,192.168.49.3,192.168.49.4
	I0916 10:57:24.979264 2120908 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:57:24.995724 2120908 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:57:25.000291 2120908 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:57:25.018458 2120908 mustload.go:65] Loading cluster: ha-234759
	I0916 10:57:25.018888 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:57:25.019196 2120908 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:57:25.045159 2120908 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:57:25.045454 2120908 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759 for IP: 192.168.49.5
	I0916 10:57:25.045470 2120908 certs.go:194] generating shared ca certs ...
	I0916 10:57:25.045485 2120908 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:57:25.045601 2120908 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:57:25.045650 2120908 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:57:25.045666 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:57:25.045680 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:57:25.045694 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:57:25.045706 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:57:25.045766 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:57:25.045797 2120908 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:57:25.045806 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:57:25.045835 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:57:25.045869 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:57:25.045904 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:57:25.047601 2120908 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:57:25.047657 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:57:25.047673 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:57:25.047686 2120908 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:57:25.047706 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:57:25.080302 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:57:25.121279 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:57:25.155156 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:57:25.188002 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:57:25.217933 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:57:25.250894 2120908 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:57:25.279223 2120908 ssh_runner.go:195] Run: openssl version
	I0916 10:57:25.287167 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:57:25.297621 2120908 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:57:25.302195 2120908 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:57:25.302311 2120908 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:57:25.309263 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:57:25.318776 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:57:25.331039 2120908 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:57:25.335635 2120908 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:57:25.335732 2120908 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:57:25.343082 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:57:25.352411 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:57:25.362672 2120908 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:57:25.366579 2120908 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:57:25.366645 2120908 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:57:25.378543 2120908 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:57:25.397170 2120908 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:57:25.401090 2120908 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:57:25.401153 2120908 kubeadm.go:934] updating node {m04 192.168.49.5 0 v1.31.1  false true} ...
	I0916 10:57:25.401259 2120908 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-234759-m04 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.5
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:57:25.401347 2120908 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:57:25.411417 2120908 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:57:25.411510 2120908 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 10:57:25.422624 2120908 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:57:25.463966 2120908 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:57:25.488926 2120908 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:57:25.492887 2120908 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:57:25.504853 2120908 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:57:25.607140 2120908 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:57:25.620936 2120908 start.go:235] Will wait 6m0s for node &{Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime: ControlPlane:false Worker:true}
	I0916 10:57:25.621341 2120908 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:57:25.625396 2120908 out.go:177] * Verifying Kubernetes components...
	I0916 10:57:25.627786 2120908 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:57:25.721156 2120908 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:57:25.734540 2120908 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:57:25.734918 2120908 kapi.go:59] client config for ha-234759: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]strin
g(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:57:25.734984 2120908 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:57:25.735260 2120908 node_ready.go:35] waiting up to 6m0s for node "ha-234759-m04" to be "Ready" ...
	I0916 10:57:25.735340 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:25.735354 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:25.735362 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:25.735368 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:25.742587 2120908 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:57:26.236427 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:26.236448 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:26.236458 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:26.236463 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:26.239456 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:26.735544 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:26.735565 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:26.735575 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:26.735579 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:26.739362 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:27.236253 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:27.236316 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:27.236331 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:27.236336 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:27.240374 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:27.735445 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:27.735472 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:27.735483 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:27.735488 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:27.738851 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:27.739797 2120908 node_ready.go:53] node "ha-234759-m04" has status "Ready":"Unknown"
	I0916 10:57:28.236027 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:28.236068 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:28.236078 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:28.236085 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:28.239225 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:28.735715 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:28.735735 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:28.735745 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:28.735748 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:28.738974 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:29.235932 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:29.235955 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:29.235965 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:29.235969 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:29.239733 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:29.735932 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:29.735953 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:29.735963 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:29.735967 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:29.739162 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:29.739938 2120908 node_ready.go:53] node "ha-234759-m04" has status "Ready":"Unknown"
	I0916 10:57:30.235438 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:30.235463 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:30.235478 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:30.235484 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:30.239214 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:30.735892 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:30.735913 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:30.735924 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:30.735930 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:30.739267 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:31.236072 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:31.236114 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:31.236124 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:31.236128 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:31.239364 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:31.735608 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:31.735630 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:31.735641 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:31.735647 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:31.738896 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:31.740570 2120908 node_ready.go:53] node "ha-234759-m04" has status "Ready":"Unknown"
	I0916 10:57:32.235951 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:32.235974 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:32.235984 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:32.235988 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:32.239245 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:32.735479 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:32.735502 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:32.735511 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:32.735517 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:32.738848 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:33.235481 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:33.235502 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:33.235512 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:33.235517 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:33.239071 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:33.239729 2120908 node_ready.go:49] node "ha-234759-m04" has status "Ready":"True"
	I0916 10:57:33.239754 2120908 node_ready.go:38] duration metric: took 7.504476165s for node "ha-234759-m04" to be "Ready" ...
	I0916 10:57:33.239764 2120908 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:57:33.239836 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:57:33.239847 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:33.239855 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:33.239860 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:33.246133 2120908 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:57:33.256571 2120908 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:33.256676 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:57:33.256688 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:33.256697 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:33.256704 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:33.260151 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:33.261092 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:33.261110 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:33.261118 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:33.261123 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:33.264955 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:33.267109 2120908 pod_ready.go:93] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:33.267140 2120908 pod_ready.go:82] duration metric: took 10.536394ms for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:33.267152 2120908 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:33.267222 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vqj8q
	I0916 10:57:33.267231 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:33.267240 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:33.267243 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:33.270973 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:33.272060 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:33.272084 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:33.272095 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:33.272101 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:33.275184 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:33.276013 2120908 pod_ready.go:93] pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:33.276035 2120908 pod_ready.go:82] duration metric: took 8.875095ms for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:33.276046 2120908 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:33.276111 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759
	I0916 10:57:33.276122 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:33.276130 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:33.276135 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:33.279288 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:33.279935 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:33.279959 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:33.279969 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:33.279974 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:33.283457 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:33.284179 2120908 pod_ready.go:93] pod "etcd-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:33.284202 2120908 pod_ready.go:82] duration metric: took 8.148479ms for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:33.284213 2120908 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:33.284326 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:57:33.284336 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:33.284346 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:33.284355 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:33.287610 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:33.288648 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:33.288669 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:33.288680 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:33.288684 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:33.292634 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:33.293214 2120908 pod_ready.go:93] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:33.293236 2120908 pod_ready.go:82] duration metric: took 8.99242ms for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:33.293248 2120908 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:33.435566 2120908 request.go:632] Waited for 142.22789ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:57:33.435676 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:57:33.435689 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:33.435698 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:33.435704 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:33.439940 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:33.635869 2120908 request.go:632] Waited for 195.232172ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:33.635925 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:33.635931 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:33.635941 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:33.635949 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:33.639623 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:33.640228 2120908 pod_ready.go:93] pod "etcd-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:33.640280 2120908 pod_ready.go:82] duration metric: took 346.99583ms for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:33.640318 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:33.836158 2120908 request.go:632] Waited for 195.725329ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759
	I0916 10:57:33.836218 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759
	I0916 10:57:33.836224 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:33.836231 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:33.836238 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:33.839390 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:34.036427 2120908 request.go:632] Waited for 196.264354ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:34.036485 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:34.036491 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:34.036500 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:34.036508 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:34.039953 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:34.040465 2120908 pod_ready.go:93] pod "kube-apiserver-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:34.040485 2120908 pod_ready.go:82] duration metric: took 400.134855ms for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:34.040497 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:34.236426 2120908 request.go:632] Waited for 195.858169ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:57:34.236544 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:57:34.236581 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:34.236610 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:34.236629 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:34.240071 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:34.435592 2120908 request.go:632] Waited for 194.247259ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:34.435702 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:34.435736 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:34.435762 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:34.435780 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:34.439258 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:34.439815 2120908 pod_ready.go:93] pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:34.439837 2120908 pod_ready.go:82] duration metric: took 399.328288ms for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:34.439849 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:34.636288 2120908 request.go:632] Waited for 196.327369ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m03
	I0916 10:57:34.636355 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m03
	I0916 10:57:34.636364 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:34.636378 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:34.636385 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:34.640543 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:34.835836 2120908 request.go:632] Waited for 194.339163ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:34.835944 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:34.835964 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:34.835996 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:34.836021 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:34.839901 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:34.840545 2120908 pod_ready.go:93] pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:34.840563 2120908 pod_ready.go:82] duration metric: took 400.688583ms for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:34.840575 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:35.035892 2120908 request.go:632] Waited for 195.250026ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:57:35.035972 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:57:35.035983 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:35.035991 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:35.036000 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:35.040059 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:35.235707 2120908 request.go:632] Waited for 194.919451ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:35.235765 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:35.235776 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:35.235785 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:35.235807 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:35.239413 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:35.240069 2120908 pod_ready.go:93] pod "kube-controller-manager-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:35.240092 2120908 pod_ready.go:82] duration metric: took 399.507511ms for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:35.240103 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:35.435999 2120908 request.go:632] Waited for 195.811639ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:57:35.436092 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:57:35.436106 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:35.436116 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:35.436153 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:35.439498 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:35.635916 2120908 request.go:632] Waited for 195.242149ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:35.636640 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:35.636676 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:35.636707 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:35.636726 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:35.645383 2120908 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:57:35.647519 2120908 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:35.647545 2120908 pod_ready.go:82] duration metric: took 407.416295ms for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:35.647557 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:35.835601 2120908 request.go:632] Waited for 187.954285ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:57:35.835714 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:57:35.835726 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:35.835746 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:35.835756 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:35.839251 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:36.036462 2120908 request.go:632] Waited for 196.351516ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:36.036529 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:36.036536 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:36.036545 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:36.036549 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:36.040165 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:36.040862 2120908 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:36.040886 2120908 pod_ready.go:82] duration metric: took 393.298737ms for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:36.040899 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:36.236385 2120908 request.go:632] Waited for 195.394714ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:57:36.236445 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:57:36.236451 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:36.236465 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:36.236472 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:36.240339 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:36.436400 2120908 request.go:632] Waited for 195.28859ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:36.436457 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:36.436464 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:36.436472 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:36.436487 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:36.439526 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:36.440232 2120908 pod_ready.go:93] pod "kube-proxy-f4jm2" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:36.440254 2120908 pod_ready.go:82] duration metric: took 399.347512ms for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:36.440267 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:36.635805 2120908 request.go:632] Waited for 195.468658ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:57:36.635921 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:57:36.635933 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:36.635944 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:36.635950 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:36.639698 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:36.835931 2120908 request.go:632] Waited for 195.203815ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:36.836050 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:36.836111 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:36.836142 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:36.836162 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:36.846985 2120908 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:57:36.848105 2120908 pod_ready.go:93] pod "kube-proxy-gwdl4" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:36.848171 2120908 pod_ready.go:82] duration metric: took 407.885289ms for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:36.848199 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-m84xg" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:37.035548 2120908 request.go:632] Waited for 187.245907ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:57:37.035687 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:57:37.035697 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:37.035707 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:37.035712 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:37.042111 2120908 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:57:37.235600 2120908 request.go:632] Waited for 192.261228ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:37.235661 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:37.235674 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:37.235682 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:37.235690 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:37.238728 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:37.436493 2120908 request.go:632] Waited for 87.161468ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:57:37.436554 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:57:37.436559 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:37.436576 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:37.436580 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:37.440751 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:37.636089 2120908 request.go:632] Waited for 194.703248ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:37.636162 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:37.636175 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:37.636184 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:37.636194 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:37.639765 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:37.848774 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:57:37.848797 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:37.848807 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:37.848812 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:37.852250 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:38.036281 2120908 request.go:632] Waited for 183.230507ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:38.036366 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:38.036373 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:38.036383 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:38.036391 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:38.040057 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:38.349102 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:57:38.349123 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:38.349133 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:38.349137 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:38.353565 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:38.435540 2120908 request.go:632] Waited for 81.129677ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:38.435656 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:38.435677 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:38.435702 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:38.435716 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:38.440735 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:38.848487 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:57:38.848506 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:38.848515 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:38.848519 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:38.851868 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:38.852952 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:38.852969 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:38.852978 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:38.852983 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:38.856249 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:38.857264 2120908 pod_ready.go:103] pod "kube-proxy-m84xg" in "kube-system" namespace has status "Ready":"False"
	I0916 10:57:39.349027 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:57:39.349048 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:39.349057 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:39.349062 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:39.353422 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:39.354769 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:57:39.354791 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:39.354801 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:39.354807 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:39.357780 2120908 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:57:39.358926 2120908 pod_ready.go:93] pod "kube-proxy-m84xg" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:39.358992 2120908 pod_ready.go:82] duration metric: took 2.510771089s for pod "kube-proxy-m84xg" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:39.359019 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:39.436317 2120908 request.go:632] Waited for 77.201686ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:57:39.436393 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:57:39.436399 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:39.436408 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:39.436413 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:39.439876 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:39.635560 2120908 request.go:632] Waited for 194.238258ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:39.635617 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:39.635623 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:39.635631 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:39.635636 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:39.638765 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:39.639734 2120908 pod_ready.go:93] pod "kube-proxy-qrdxc" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:39.639756 2120908 pod_ready.go:82] duration metric: took 280.717752ms for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:39.639767 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:39.836145 2120908 request.go:632] Waited for 196.311927ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:57:39.836221 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:57:39.836231 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:39.836240 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:39.836244 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:39.841093 2120908 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:57:40.036399 2120908 request.go:632] Waited for 194.5261ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:40.036470 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:57:40.036477 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:40.036485 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:40.036490 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:40.040265 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:40.040902 2120908 pod_ready.go:93] pod "kube-scheduler-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:40.040969 2120908 pod_ready.go:82] duration metric: took 401.192629ms for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:40.041000 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:40.235885 2120908 request.go:632] Waited for 194.80641ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:57:40.235989 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:57:40.236000 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:40.236009 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:40.236021 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:40.239682 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:40.435577 2120908 request.go:632] Waited for 195.193837ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:40.435633 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:57:40.435641 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:40.435650 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:40.435662 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:40.439477 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:40.440132 2120908 pod_ready.go:93] pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:40.440153 2120908 pod_ready.go:82] duration metric: took 399.137052ms for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:40.440165 2120908 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:40.636104 2120908 request.go:632] Waited for 195.872709ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:57:40.636163 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:57:40.636169 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:40.636178 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:40.636188 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:40.639643 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:40.835702 2120908 request.go:632] Waited for 195.283437ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:40.835763 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:57:40.835773 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:40.835782 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:40.835785 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:40.841941 2120908 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:57:40.843098 2120908 pod_ready.go:93] pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace has status "Ready":"True"
	I0916 10:57:40.843119 2120908 pod_ready.go:82] duration metric: took 402.946694ms for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:57:40.843134 2120908 pod_ready.go:39] duration metric: took 7.603358665s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:57:40.843148 2120908 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:57:40.843226 2120908 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:57:40.857975 2120908 system_svc.go:56] duration metric: took 14.818123ms WaitForService to wait for kubelet
	I0916 10:57:40.858003 2120908 kubeadm.go:582] duration metric: took 15.23702194s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:57:40.858021 2120908 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:57:41.036470 2120908 request.go:632] Waited for 178.358135ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:57:41.036559 2120908 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:57:41.036566 2120908 round_trippers.go:469] Request Headers:
	I0916 10:57:41.036575 2120908 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:57:41.036597 2120908 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:57:41.040419 2120908 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:57:41.042502 2120908 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:57:41.042535 2120908 node_conditions.go:123] node cpu capacity is 2
	I0916 10:57:41.042548 2120908 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:57:41.042553 2120908 node_conditions.go:123] node cpu capacity is 2
	I0916 10:57:41.042558 2120908 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:57:41.042588 2120908 node_conditions.go:123] node cpu capacity is 2
	I0916 10:57:41.042600 2120908 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:57:41.042605 2120908 node_conditions.go:123] node cpu capacity is 2
	I0916 10:57:41.042610 2120908 node_conditions.go:105] duration metric: took 184.583401ms to run NodePressure ...
	I0916 10:57:41.042626 2120908 start.go:241] waiting for startup goroutines ...
	I0916 10:57:41.042661 2120908 start.go:255] writing updated cluster config ...
	I0916 10:57:41.043065 2120908 ssh_runner.go:195] Run: rm -f paused
	I0916 10:57:41.051755 2120908 out.go:177] * Done! kubectl is now configured to use "ha-234759" cluster and "default" namespace by default
	E0916 10:57:41.054517 2120908 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	35d6358938dea       ba04bb24b9575       36 seconds ago       Running             storage-provisioner       2                   58d0e7de3b6ee       storage-provisioner
	41e6b84968e22       2f6c962e7b831       About a minute ago   Running             coredns                   1                   30fba837e76d0       coredns-7c65d6cfc9-vqj8q
	184672af3370a       89a35e2ebb6b9       About a minute ago   Running             busybox                   1                   f0ac92849bc1a       busybox-7dff88458-kjr9x
	d108cfac8fd4f       2f6c962e7b831       About a minute ago   Running             coredns                   1                   c40853737334c       coredns-7c65d6cfc9-2l4br
	9806835e6e060       6a23fa8fd2b78       About a minute ago   Running             kindnet-cni               1                   31ebd5494900f       kindnet-q8nl6
	ad8481816fac5       ba04bb24b9575       About a minute ago   Exited              storage-provisioner       1                   58d0e7de3b6ee       storage-provisioner
	e617f849070ed       24a140c548c07       About a minute ago   Running             kube-proxy                1                   43f22af3432c4       kube-proxy-gwdl4
	d4bb975eec45b       7f8aa378bb47d       About a minute ago   Running             kube-scheduler            1                   34a94e86a2890       kube-scheduler-ha-234759
	bfcc891f8e795       7e2a4e229620b       About a minute ago   Running             kube-vip                  0                   6997a47264a16       kube-vip-ha-234759
	b43574d882fbc       27e3830e14027       About a minute ago   Running             etcd                      1                   9b720087da357       etcd-ha-234759
	63b795aabed00       279f381cb3736       About a minute ago   Running             kube-controller-manager   1                   1c3a264f753d8       kube-controller-manager-ha-234759
	2b3270e2806d2       d3f53a98c0a9d       About a minute ago   Running             kube-apiserver            1                   194a5ade9b5b7       kube-apiserver-ha-234759
	7e2e09055b617       89a35e2ebb6b9       4 minutes ago        Exited              busybox                   0                   59f93d3cf6ebc       busybox-7dff88458-kjr9x
	0e47bf675d7df       2f6c962e7b831       5 minutes ago        Exited              coredns                   0                   297acf9bc71e4       coredns-7c65d6cfc9-2l4br
	e629c24c41e32       2f6c962e7b831       5 minutes ago        Exited              coredns                   0                   88f003522915c       coredns-7c65d6cfc9-vqj8q
	7d51a8f7f42ff       6a23fa8fd2b78       6 minutes ago        Exited              kindnet-cni               0                   ae1a0829d833f       kindnet-q8nl6
	900d2ad5148fe       24a140c548c07       6 minutes ago        Exited              kube-proxy                0                   4e5ecfb50c3cd       kube-proxy-gwdl4
	324a547043689       27e3830e14027       6 minutes ago        Exited              etcd                      0                   fa0cb25bfd24a       etcd-ha-234759
	5a7d53b11a05f       7f8aa378bb47d       6 minutes ago        Exited              kube-scheduler            0                   56f2eb27f3396       kube-scheduler-ha-234759
	a7002833ce71b       279f381cb3736       6 minutes ago        Exited              kube-controller-manager   0                   d4248c2bf66dc       kube-controller-manager-ha-234759
	fd48034050bae       d3f53a98c0a9d       6 minutes ago        Exited              kube-apiserver            0                   2d1650dd1ced5       kube-apiserver-ha-234759
	
	
	==> containerd <==
	Sep 16 10:57:03 ha-234759 containerd[574]: time="2024-09-16T10:57:03.322738230Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.076492830Z" level=info msg="RemoveContainer for \"2586d6167e7558670ba282e8e630c6301933dfdf0302bef7718de8fada959378\""
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.083385300Z" level=info msg="RemoveContainer for \"2586d6167e7558670ba282e8e630c6301933dfdf0302bef7718de8fada959378\" returns successfully"
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.419402719Z" level=info msg="RemoveContainer for \"36da5a0fb370e723326a9743b110be293f982a4028eb1a1d81cc396f52ee1ec8\""
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.424688914Z" level=info msg="RemoveContainer for \"36da5a0fb370e723326a9743b110be293f982a4028eb1a1d81cc396f52ee1ec8\" returns successfully"
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.426270114Z" level=info msg="StopPodSandbox for \"0a388c673a7b64b61d5da54c4235b2a2d56062cf32d6bcf45ec1d1d794828df0\""
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.426368247Z" level=info msg="TearDown network for sandbox \"0a388c673a7b64b61d5da54c4235b2a2d56062cf32d6bcf45ec1d1d794828df0\" successfully"
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.426380087Z" level=info msg="StopPodSandbox for \"0a388c673a7b64b61d5da54c4235b2a2d56062cf32d6bcf45ec1d1d794828df0\" returns successfully"
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.427016801Z" level=info msg="RemovePodSandbox for \"0a388c673a7b64b61d5da54c4235b2a2d56062cf32d6bcf45ec1d1d794828df0\""
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.427135110Z" level=info msg="Forcibly stopping sandbox \"0a388c673a7b64b61d5da54c4235b2a2d56062cf32d6bcf45ec1d1d794828df0\""
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.427207667Z" level=info msg="TearDown network for sandbox \"0a388c673a7b64b61d5da54c4235b2a2d56062cf32d6bcf45ec1d1d794828df0\" successfully"
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.431354308Z" level=warning msg="Failed to get podSandbox status for container event for sandboxID \"0a388c673a7b64b61d5da54c4235b2a2d56062cf32d6bcf45ec1d1d794828df0\": an error occurred when try to find sandbox: not found. Sending the event with nil podSandboxStatus."
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.431441594Z" level=info msg="RemovePodSandbox \"0a388c673a7b64b61d5da54c4235b2a2d56062cf32d6bcf45ec1d1d794828df0\" returns successfully"
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.431986239Z" level=info msg="StopPodSandbox for \"97c9faf1ef6b96c003fdebade5ac5727dccc08b05668dc04bb6f31fe25fbfbe2\""
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.432083559Z" level=info msg="TearDown network for sandbox \"97c9faf1ef6b96c003fdebade5ac5727dccc08b05668dc04bb6f31fe25fbfbe2\" successfully"
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.432095703Z" level=info msg="StopPodSandbox for \"97c9faf1ef6b96c003fdebade5ac5727dccc08b05668dc04bb6f31fe25fbfbe2\" returns successfully"
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.432498014Z" level=info msg="RemovePodSandbox for \"97c9faf1ef6b96c003fdebade5ac5727dccc08b05668dc04bb6f31fe25fbfbe2\""
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.432533238Z" level=info msg="Forcibly stopping sandbox \"97c9faf1ef6b96c003fdebade5ac5727dccc08b05668dc04bb6f31fe25fbfbe2\""
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.432596393Z" level=info msg="TearDown network for sandbox \"97c9faf1ef6b96c003fdebade5ac5727dccc08b05668dc04bb6f31fe25fbfbe2\" successfully"
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.437898744Z" level=warning msg="Failed to get podSandbox status for container event for sandboxID \"97c9faf1ef6b96c003fdebade5ac5727dccc08b05668dc04bb6f31fe25fbfbe2\": an error occurred when try to find sandbox: not found. Sending the event with nil podSandboxStatus."
	Sep 16 10:57:04 ha-234759 containerd[574]: time="2024-09-16T10:57:04.437981845Z" level=info msg="RemovePodSandbox \"97c9faf1ef6b96c003fdebade5ac5727dccc08b05668dc04bb6f31fe25fbfbe2\" returns successfully"
	Sep 16 10:57:15 ha-234759 containerd[574]: time="2024-09-16T10:57:15.408453855Z" level=info msg="CreateContainer within sandbox \"58d0e7de3b6eea0c0975d0a5d62a1bf749d5e7762cf1aeff37e6c952729a280c\" for container &ContainerMetadata{Name:storage-provisioner,Attempt:2,}"
	Sep 16 10:57:15 ha-234759 containerd[574]: time="2024-09-16T10:57:15.438585606Z" level=info msg="CreateContainer within sandbox \"58d0e7de3b6eea0c0975d0a5d62a1bf749d5e7762cf1aeff37e6c952729a280c\" for &ContainerMetadata{Name:storage-provisioner,Attempt:2,} returns container id \"35d6358938dea6788cef8042038c69dcd63a9de42f599cfc4b7797f2edc50f67\""
	Sep 16 10:57:15 ha-234759 containerd[574]: time="2024-09-16T10:57:15.440322835Z" level=info msg="StartContainer for \"35d6358938dea6788cef8042038c69dcd63a9de42f599cfc4b7797f2edc50f67\""
	Sep 16 10:57:15 ha-234759 containerd[574]: time="2024-09-16T10:57:15.509635724Z" level=info msg="StartContainer for \"35d6358938dea6788cef8042038c69dcd63a9de42f599cfc4b7797f2edc50f67\" returns successfully"
	
	
	==> coredns [0e47bf675d7dfaa397cb12c3a975c0fdae535cdf7989381139d757ac5f8a5eaf] <==
	[INFO] 10.244.2.2:52667 - 3 "AAAA IN kubernetes.io. udp 31 false 512" NOERROR qr,rd,ra 31 0.001304304s
	[INFO] 10.244.1.2:37083 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000164668s
	[INFO] 10.244.1.2:49488 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.00026353s
	[INFO] 10.244.1.2:56285 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.002384855s
	[INFO] 10.244.1.2:35002 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000170698s
	[INFO] 10.244.1.2:50858 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000131306s
	[INFO] 10.244.0.4:38621 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000076611s
	[INFO] 10.244.0.4:36661 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001075152s
	[INFO] 10.244.0.4:53651 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000141973s
	[INFO] 10.244.2.2:45377 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000162452s
	[INFO] 10.244.2.2:43234 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000100759s
	[INFO] 10.244.1.2:43502 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000106273s
	[INFO] 10.244.0.4:55514 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000173914s
	[INFO] 10.244.0.4:55773 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000884467s
	[INFO] 10.244.0.4:41665 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000052447s
	[INFO] 10.244.2.2:41797 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000094014s
	[INFO] 10.244.2.2:36525 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000069365s
	[INFO] 10.244.2.2:43068 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000061341s
	[INFO] 10.244.1.2:60478 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000124816s
	[INFO] 10.244.1.2:59811 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000107117s
	[INFO] 10.244.0.4:38611 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000104008s
	[INFO] 10.244.0.4:58312 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000183285s
	[INFO] 10.244.0.4:37216 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000090026s
	[INFO] 10.244.2.2:35594 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000105205s
	[INFO] 10.244.2.2:35249 - 5 "PTR IN 1.49.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000187798s
	
	
	==> coredns [41e6b84968e22184a2af0ae6cec735fddcf84294f353b02cf1533037a27300a2] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:44068 - 37292 "HINFO IN 6390764944663340044.6258342243358673423. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.017645978s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1734096306]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:56:34.155) (total time: 30000ms):
	Trace[1734096306]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:57:04.156)
	Trace[1734096306]: [30.000588643s] [30.000588643s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1653382229]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:56:34.156) (total time: 30000ms):
	Trace[1653382229]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:57:04.156)
	Trace[1653382229]: [30.000405859s] [30.000405859s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[126696912]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:56:34.155) (total time: 30001ms):
	Trace[126696912]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:57:04.156)
	Trace[126696912]: [30.001097645s] [30.001097645s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	
	
	==> coredns [d108cfac8fd4ff4eeef092b8bd57d5611a4f5a1e0f6ff03b515dd66901aea1da] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:49668 - 11463 "HINFO IN 672348644181696242.155275422572133107. udp 55 false 512" NXDOMAIN qr,rd,ra 55 0.013472072s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1609807867]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:56:34.111) (total time: 30001ms):
	Trace[1609807867]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (10:57:04.112)
	Trace[1609807867]: [30.001308162s] [30.001308162s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1774389382]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:56:34.111) (total time: 30000ms):
	Trace[1774389382]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:57:04.112)
	Trace[1774389382]: [30.000964944s] [30.000964944s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1459127489]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:56:34.111) (total time: 30001ms):
	Trace[1459127489]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:57:04.112)
	Trace[1459127489]: [30.001070585s] [30.001070585s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	
	
	==> coredns [e629c24c41e32603dc9a53125aa7122a5a0c58d985e95165cffe89d5670988c4] <==
	[INFO] 10.244.2.2:55324 - 5 "PTR IN 148.40.75.147.in-addr.arpa. udp 44 false 512" NXDOMAIN qr,rd,ra 44 0.001285121s
	[INFO] 10.244.1.2:48295 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.002432526s
	[INFO] 10.244.1.2:55741 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000227814s
	[INFO] 10.244.1.2:36649 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000202985s
	[INFO] 10.244.0.4:56384 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000102695s
	[INFO] 10.244.0.4:40529 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.00123465s
	[INFO] 10.244.0.4:54004 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000137197s
	[INFO] 10.244.0.4:51298 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000069636s
	[INFO] 10.244.0.4:46099 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000151031s
	[INFO] 10.244.2.2:44482 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001418231s
	[INFO] 10.244.2.2:41395 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000067806s
	[INFO] 10.244.2.2:34678 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000130428s
	[INFO] 10.244.2.2:45582 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001555683s
	[INFO] 10.244.2.2:39632 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000067101s
	[INFO] 10.244.2.2:46573 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.00010962s
	[INFO] 10.244.1.2:45854 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000124578s
	[INFO] 10.244.1.2:52505 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000082141s
	[INFO] 10.244.1.2:54504 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000106543s
	[INFO] 10.244.0.4:43966 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000198588s
	[INFO] 10.244.2.2:57482 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000082715s
	[INFO] 10.244.1.2:42996 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000114255s
	[INFO] 10.244.1.2:54974 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.00016132s
	[INFO] 10.244.0.4:36323 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000157373s
	[INFO] 10.244.2.2:48775 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000152442s
	[INFO] 10.244.2.2:50527 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000112869s
	
	
	==> describe nodes <==
	Name:               ha-234759
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_51_47_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:51:45 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:57:45 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:56:22 +0000   Mon, 16 Sep 2024 10:51:45 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:56:22 +0000   Mon, 16 Sep 2024 10:51:45 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:56:22 +0000   Mon, 16 Sep 2024 10:51:45 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:56:22 +0000   Mon, 16 Sep 2024 10:51:46 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    ha-234759
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 735022337fd34a658820e97956183d56
	  System UUID:                2a58ed5f-69e8-4ab8-a10e-2a95cf1d9dec
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (11 in total)
	  Namespace                   Name                                 CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                 ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-kjr9x              0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m32s
	  kube-system                 coredns-7c65d6cfc9-2l4br             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     6m1s
	  kube-system                 coredns-7c65d6cfc9-vqj8q             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     6m1s
	  kube-system                 etcd-ha-234759                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         6m6s
	  kube-system                 kindnet-q8nl6                        100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      6m1s
	  kube-system                 kube-apiserver-ha-234759             250m (12%)    0 (0%)      0 (0%)           0 (0%)         6m6s
	  kube-system                 kube-controller-manager-ha-234759    200m (10%)    0 (0%)      0 (0%)           0 (0%)         6m6s
	  kube-system                 kube-proxy-gwdl4                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         6m1s
	  kube-system                 kube-scheduler-ha-234759             100m (5%)     0 (0%)      0 (0%)           0 (0%)         6m6s
	  kube-system                 kube-vip-ha-234759                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         81s
	  kube-system                 storage-provisioner                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         6m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                950m (47%)  100m (5%)
	  memory             290Mi (3%)  390Mi (4%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                  From             Message
	  ----     ------                   ----                 ----             -------
	  Normal   Starting                 78s                  kube-proxy       
	  Normal   Starting                 6m                   kube-proxy       
	  Normal   NodeAllocatableEnforced  6m6s                 kubelet          Updated Node Allocatable limit across pods
	  Normal   Starting                 6m6s                 kubelet          Starting kubelet.
	  Warning  CgroupV1                 6m6s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientPID     6m6s                 kubelet          Node ha-234759 status is now: NodeHasSufficientPID
	  Normal   NodeReady                6m6s                 kubelet          Node ha-234759 status is now: NodeReady
	  Normal   NodeHasSufficientMemory  6m6s                 kubelet          Node ha-234759 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    6m6s                 kubelet          Node ha-234759 status is now: NodeHasNoDiskPressure
	  Normal   RegisteredNode           6m2s                 node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           5m31s                node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           4m47s                node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           2m41s                node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   Starting                 108s                 kubelet          Starting kubelet.
	  Warning  CgroupV1                 108s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  108s (x8 over 108s)  kubelet          Node ha-234759 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    108s (x7 over 108s)  kubelet          Node ha-234759 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     108s (x7 over 108s)  kubelet          Node ha-234759 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  108s                 kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           92s                  node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           90s                  node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           72s                  node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	
	
	Name:               ha-234759-m02
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_52_13_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:52:10 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:57:51 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:56:20 +0000   Mon, 16 Sep 2024 10:52:10 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:56:20 +0000   Mon, 16 Sep 2024 10:52:10 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:56:20 +0000   Mon, 16 Sep 2024 10:52:10 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:56:20 +0000   Mon, 16 Sep 2024 10:52:11 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.3
	  Hostname:    ha-234759-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 67a7fac1a4624ec780d67385525ee9c5
	  System UUID:                ee72b9d9-548d-49fb-8dc5-aa6839abad7f
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-7l4g7                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m32s
	  kube-system                 etcd-ha-234759-m02                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         5m40s
	  kube-system                 kindnet-svsnq                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      5m42s
	  kube-system                 kube-apiserver-ha-234759-m02             250m (12%)    0 (0%)      0 (0%)           0 (0%)         5m41s
	  kube-system                 kube-controller-manager-ha-234759-m02    200m (10%)    0 (0%)      0 (0%)           0 (0%)         5m40s
	  kube-system                 kube-proxy-f4jm2                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m42s
	  kube-system                 kube-scheduler-ha-234759-m02             100m (5%)     0 (0%)      0 (0%)           0 (0%)         5m41s
	  kube-system                 kube-vip-ha-234759-m02                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m37s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (1%)  50Mi (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 77s                    kube-proxy       
	  Normal   Starting                 2m30s                  kube-proxy       
	  Normal   Starting                 5m34s                  kube-proxy       
	  Normal   NodeAllocatableEnforced  5m42s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           5m42s                  node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   Starting                 5m42s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 5m42s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  5m42s (x8 over 5m42s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    5m42s (x7 over 5m42s)  kubelet          Node ha-234759-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     5m42s (x7 over 5m42s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           5m31s                  node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   RegisteredNode           4m47s                  node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   Starting                 2m53s                  kubelet          Starting kubelet.
	  Normal   NodeAllocatableEnforced  2m53s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientPID     2m53s (x7 over 2m53s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientPID
	  Warning  CgroupV1                 2m53s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  2m53s (x8 over 2m53s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    2m53s (x7 over 2m53s)  kubelet          Node ha-234759-m02 status is now: NodeHasNoDiskPressure
	  Normal   RegisteredNode           2m41s                  node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   Starting                 105s                   kubelet          Starting kubelet.
	  Warning  CgroupV1                 105s                   kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  105s (x8 over 105s)    kubelet          Node ha-234759-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    105s (x7 over 105s)    kubelet          Node ha-234759-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     105s (x7 over 105s)    kubelet          Node ha-234759-m02 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  105s                   kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           92s                    node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   RegisteredNode           90s                    node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   RegisteredNode           72s                    node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	
	
	Name:               ha-234759-m04
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759-m04
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_54_13_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:54:12 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759-m04
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:57:43 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:57:32 +0000   Mon, 16 Sep 2024 10:57:32 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:57:32 +0000   Mon, 16 Sep 2024 10:57:32 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:57:32 +0000   Mon, 16 Sep 2024 10:57:32 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:57:32 +0000   Mon, 16 Sep 2024 10:57:32 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.5
	  Hostname:    ha-234759-m04
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 590016f10867484d8e210af5509eb8a6
	  System UUID:                3f4e61b4-061e-4448-a9f3-3c0401d9b215
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.3.0/24
	PodCIDRs:                     10.244.3.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                       CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                       ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-n5dcg    0 (0%)        0 (0%)      0 (0%)           0 (0%)         10s
	  kube-system                 kindnet-lwtj4              100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m40s
	  kube-system                 kube-proxy-m84xg           0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m40s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 13s                    kube-proxy       
	  Normal   Starting                 3m37s                  kube-proxy       
	  Normal   NodeHasSufficientMemory  3m40s (x2 over 3m40s)  kubelet          Node ha-234759-m04 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m40s (x2 over 3m40s)  kubelet          Node ha-234759-m04 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m40s (x2 over 3m40s)  kubelet          Node ha-234759-m04 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  3m40s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeReady                3m39s                  kubelet          Node ha-234759-m04 status is now: NodeReady
	  Normal   RegisteredNode           3m37s                  node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   RegisteredNode           3m37s                  node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   RegisteredNode           3m36s                  node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   RegisteredNode           2m41s                  node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   RegisteredNode           92s                    node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   RegisteredNode           90s                    node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   RegisteredNode           72s                    node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   NodeNotReady             52s                    node-controller  Node ha-234759-m04 status is now: NodeNotReady
	  Normal   Starting                 32s                    kubelet          Starting kubelet.
	  Warning  CgroupV1                 32s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  32s                    kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasNoDiskPressure    26s (x7 over 32s)      kubelet          Node ha-234759-m04 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     26s (x7 over 32s)      kubelet          Node ha-234759-m04 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  20s (x8 over 32s)      kubelet          Node ha-234759-m04 status is now: NodeHasSufficientMemory
	
	
	==> dmesg <==
	
	
	==> etcd [324a5470436890084c7d201c8b4f70a15952b517d95c7e4094491c0aafb39871] <==
	{"level":"info","ts":"2024-09-16T10:55:44.064397Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 2"}
	{"level":"info","ts":"2024-09-16T10:55:44.064497Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:55:44.064535Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:55:44.064589Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc [logterm: 2, index: 1743] sent MsgPreVote request to 3b59db4913cc3eb9 at term 2"}
	{"level":"info","ts":"2024-09-16T10:55:44.064633Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc [logterm: 2, index: 1743] sent MsgPreVote request to 94f3900974800f10 at term 2"}
	{"level":"warn","ts":"2024-09-16T10:55:44.494089Z","caller":"etcdserver/v3_server.go:920","msg":"waiting for ReadIndex response took too long, retrying","sent-request-id":8128031932398637138,"retry-timeout":"500ms"}
	{"level":"warn","ts":"2024-09-16T10:55:44.510815Z","caller":"v3rpc/interceptor.go:197","msg":"request stats","start time":"2024-09-16T10:55:37.509601Z","time spent":"7.001207045s","remote":"127.0.0.1:44210","response type":"/etcdserverpb.KV/Txn","request count":0,"request size":0,"response count":0,"response size":0,"request content":""}
	{"level":"warn","ts":"2024-09-16T10:55:44.996867Z","caller":"etcdserver/v3_server.go:920","msg":"waiting for ReadIndex response took too long, retrying","sent-request-id":8128031932398637138,"retry-timeout":"500ms"}
	{"level":"warn","ts":"2024-09-16T10:55:45.080474Z","caller":"rafthttp/probing_status.go:68","msg":"prober detected unhealthy status","round-tripper-name":"ROUND_TRIPPER_SNAPSHOT","remote-peer-id":"94f3900974800f10","rtt":"2.797329ms","error":"dial tcp 192.168.49.4:2380: i/o timeout"}
	{"level":"warn","ts":"2024-09-16T10:55:45.083600Z","caller":"rafthttp/probing_status.go:68","msg":"prober detected unhealthy status","round-tripper-name":"ROUND_TRIPPER_RAFT_MESSAGE","remote-peer-id":"94f3900974800f10","rtt":"23.691568ms","error":"dial tcp 192.168.49.4:2380: i/o timeout"}
	{"level":"warn","ts":"2024-09-16T10:55:45.403171Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"2.000175857s","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/health\" ","response":"","error":"context deadline exceeded"}
	{"level":"info","ts":"2024-09-16T10:55:45.403381Z","caller":"traceutil/trace.go:171","msg":"trace[780510158] range","detail":"{range_begin:/registry/health; range_end:; }","duration":"2.000399691s","start":"2024-09-16T10:55:43.402968Z","end":"2024-09-16T10:55:45.403368Z","steps":["trace[780510158] 'agreement among raft nodes before linearized reading'  (duration: 2.000173617s)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:55:45.403496Z","caller":"v3rpc/interceptor.go:197","msg":"request stats","start time":"2024-09-16T10:55:43.402931Z","time spent":"2.000550902s","remote":"127.0.0.1:43936","response type":"/etcdserverpb.KV/Range","request count":0,"request size":18,"response count":0,"response size":0,"request content":"key:\"/registry/health\" "}
	{"level":"warn","ts":"2024-09-16T10:55:45.496959Z","caller":"etcdserver/v3_server.go:920","msg":"waiting for ReadIndex response took too long, retrying","sent-request-id":8128031932398637138,"retry-timeout":"500ms"}
	{"level":"info","ts":"2024-09-16T10:55:45.562518Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 2"}
	{"level":"info","ts":"2024-09-16T10:55:45.562564Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T10:55:45.562580Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 2"}
	{"level":"info","ts":"2024-09-16T10:55:45.562599Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc [logterm: 2, index: 1743] sent MsgPreVote request to 3b59db4913cc3eb9 at term 2"}
	{"level":"info","ts":"2024-09-16T10:55:45.562609Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc [logterm: 2, index: 1743] sent MsgPreVote request to 94f3900974800f10 at term 2"}
	{"level":"warn","ts":"2024-09-16T10:55:45.614276Z","caller":"rafthttp/probing_status.go:68","msg":"prober detected unhealthy status","round-tripper-name":"ROUND_TRIPPER_SNAPSHOT","remote-peer-id":"3b59db4913cc3eb9","rtt":"4.380059ms","error":"dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:55:45.614607Z","caller":"rafthttp/probing_status.go:68","msg":"prober detected unhealthy status","round-tripper-name":"ROUND_TRIPPER_RAFT_MESSAGE","remote-peer-id":"3b59db4913cc3eb9","rtt":"28.873246ms","error":"dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:55:45.634043Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"10.229130121s","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/services/endpoints/kube-system/k8s.io-minikube-hostpath\" ","response":"","error":"context canceled"}
	{"level":"info","ts":"2024-09-16T10:55:45.634313Z","caller":"traceutil/trace.go:171","msg":"trace[1523703293] range","detail":"{range_begin:/registry/services/endpoints/kube-system/k8s.io-minikube-hostpath; range_end:; }","duration":"10.229412663s","start":"2024-09-16T10:55:35.404878Z","end":"2024-09-16T10:55:45.634291Z","steps":["trace[1523703293] 'agreement among raft nodes before linearized reading'  (duration: 10.229128809s)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:55:45.634547Z","caller":"v3rpc/interceptor.go:197","msg":"request stats","start time":"2024-09-16T10:55:35.404836Z","time spent":"10.229679599s","remote":"127.0.0.1:44108","response type":"/etcdserverpb.KV/Range","request count":0,"request size":67,"response count":0,"response size":0,"request content":"key:\"/registry/services/endpoints/kube-system/k8s.io-minikube-hostpath\" "}
	2024/09/16 10:55:45 WARNING: [core] [Server #8] grpc: Server.processUnaryRPC failed to write status: connection error: desc = "transport is closing"
	
	
	==> etcd [b43574d882fbc3d7cc3c3eb1f0448a517af58314b6d7ebfedb1cf5aeac8827c8] <==
	{"level":"info","ts":"2024-09-16T10:56:29.402465Z","caller":"rafthttp/stream.go:249","msg":"set message encoder","from":"aec36adc501070cc","to":"94f3900974800f10","stream-type":"stream MsgApp v2"}
	{"level":"info","ts":"2024-09-16T10:56:29.402498Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:56:29.404621Z","caller":"rafthttp/stream.go:249","msg":"set message encoder","from":"aec36adc501070cc","to":"94f3900974800f10","stream-type":"stream Message"}
	{"level":"info","ts":"2024-09-16T10:56:29.404659Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:56:29.410620Z","caller":"rafthttp/stream.go:412","msg":"established TCP streaming connection with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"warn","ts":"2024-09-16T10:56:31.761297Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"100.417934ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/replicasets/default/busybox-7dff88458\" ","response":"range_response_count:1 size:2061"}
	{"level":"info","ts":"2024-09-16T10:56:31.761357Z","caller":"traceutil/trace.go:171","msg":"trace[1938970099] range","detail":"{range_begin:/registry/replicasets/default/busybox-7dff88458; range_end:; response_count:1; response_revision:1648; }","duration":"100.487013ms","start":"2024-09-16T10:56:31.660858Z","end":"2024-09-16T10:56:31.761345Z","steps":["trace[1938970099] 'agreement among raft nodes before linearized reading'  (duration: 100.321935ms)"],"step_count":1}
	{"level":"info","ts":"2024-09-16T10:57:44.785274Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc switched to configuration voters=(4276690428076244665 12593026477526642892)"}
	{"level":"info","ts":"2024-09-16T10:57:44.787286Z","caller":"membership/cluster.go:472","msg":"removed member","cluster-id":"fa54960ea34d58be","local-member-id":"aec36adc501070cc","removed-remote-peer-id":"94f3900974800f10","removed-remote-peer-urls":["https://192.168.49.4:2380"]}
	{"level":"info","ts":"2024-09-16T10:57:44.787331Z","caller":"rafthttp/peer.go:330","msg":"stopping remote peer","remote-peer-id":"94f3900974800f10"}
	{"level":"warn","ts":"2024-09-16T10:57:44.787916Z","caller":"rafthttp/stream.go:286","msg":"closed TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:57:44.787956Z","caller":"rafthttp/stream.go:294","msg":"stopped TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","remote-peer-id":"94f3900974800f10"}
	{"level":"warn","ts":"2024-09-16T10:57:44.788156Z","caller":"rafthttp/stream.go:286","msg":"closed TCP streaming connection with remote peer","stream-writer-type":"stream Message","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:57:44.788266Z","caller":"rafthttp/stream.go:294","msg":"stopped TCP streaming connection with remote peer","stream-writer-type":"stream Message","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:57:44.788400Z","caller":"rafthttp/pipeline.go:85","msg":"stopped HTTP pipelining with remote peer","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"warn","ts":"2024-09-16T10:57:44.788614Z","caller":"rafthttp/stream.go:421","msg":"lost TCP streaming connection with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10","error":"context canceled"}
	{"level":"warn","ts":"2024-09-16T10:57:44.788659Z","caller":"rafthttp/peer_status.go:66","msg":"peer became inactive (message send to peer failed)","peer-id":"94f3900974800f10","error":"failed to read 94f3900974800f10 on stream MsgApp v2 (context canceled)"}
	{"level":"info","ts":"2024-09-16T10:57:44.788748Z","caller":"rafthttp/stream.go:442","msg":"stopped stream reader with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"warn","ts":"2024-09-16T10:57:44.788920Z","caller":"rafthttp/stream.go:421","msg":"lost TCP streaming connection with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10","error":"context canceled"}
	{"level":"info","ts":"2024-09-16T10:57:44.788944Z","caller":"rafthttp/stream.go:442","msg":"stopped stream reader with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:57:44.788956Z","caller":"rafthttp/peer.go:335","msg":"stopped remote peer","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:57:44.788968Z","caller":"rafthttp/transport.go:355","msg":"removed remote peer","local-member-id":"aec36adc501070cc","removed-remote-peer-id":"94f3900974800f10"}
	{"level":"warn","ts":"2024-09-16T10:57:44.811186Z","caller":"embed/config_logging.go:170","msg":"rejected connection on client endpoint","remote-addr":"192.168.49.4:49914","server-name":"","error":"EOF"}
	{"level":"warn","ts":"2024-09-16T10:57:44.831614Z","caller":"embed/config_logging.go:170","msg":"rejected connection on peer endpoint","remote-addr":"192.168.49.4:35212","server-name":"","error":"EOF"}
	{"level":"warn","ts":"2024-09-16T10:57:44.835755Z","caller":"embed/config_logging.go:170","msg":"rejected connection on peer endpoint","remote-addr":"192.168.49.4:35198","server-name":"","error":"read tcp 192.168.49.2:2380->192.168.49.4:35198: read: connection reset by peer"}
	
	
	==> kernel <==
	 10:57:52 up 1 day, 14:40,  0 users,  load average: 3.10, 2.66, 1.95
	Linux ha-234759 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [7d51a8f7f42ff3becfe558f4f4801bec107af9426327a36d60c9cf3b27276148] <==
	I0916 10:55:12.824480       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:55:22.825039       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:55:22.825084       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:55:22.825207       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:55:22.825215       1 main.go:299] handling current node
	I0916 10:55:22.825229       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:55:22.825235       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:55:22.825288       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:55:22.825294       1 main.go:322] Node ha-234759-m03 has CIDR [10.244.2.0/24] 
	I0916 10:55:32.832123       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:55:32.832160       1 main.go:299] handling current node
	I0916 10:55:32.832177       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:55:32.832183       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:55:32.832449       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:55:32.832469       1 main.go:322] Node ha-234759-m03 has CIDR [10.244.2.0/24] 
	I0916 10:55:32.832589       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:55:32.832605       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:55:42.830779       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:55:42.830820       1 main.go:299] handling current node
	I0916 10:55:42.830840       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:55:42.830848       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:55:42.831127       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:55:42.831229       1 main.go:322] Node ha-234759-m03 has CIDR [10.244.2.0/24] 
	I0916 10:55:42.831402       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:55:42.831486       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	
	
	==> kindnet [9806835e6e060a0abfcf806d43a4b9a66b69879aff1b10ccf545e0ee3642321a] <==
	I0916 10:57:14.026150       1 main.go:299] handling current node
	I0916 10:57:24.030934       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:57:24.030982       1 main.go:299] handling current node
	I0916 10:57:24.030998       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:57:24.031005       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:57:24.031198       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:57:24.031272       1 main.go:322] Node ha-234759-m03 has CIDR [10.244.2.0/24] 
	I0916 10:57:24.031401       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:57:24.031446       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:57:34.024725       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:57:34.024763       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:57:34.025190       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:57:34.025212       1 main.go:299] handling current node
	I0916 10:57:34.025226       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:57:34.025232       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:57:34.025314       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:57:34.025406       1 main.go:322] Node ha-234759-m03 has CIDR [10.244.2.0/24] 
	I0916 10:57:44.024926       1 main.go:295] Handling node with IPs: map[192.168.49.4:{}]
	I0916 10:57:44.024974       1 main.go:322] Node ha-234759-m03 has CIDR [10.244.2.0/24] 
	I0916 10:57:44.025194       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:57:44.025211       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:57:44.025254       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:57:44.025270       1 main.go:299] handling current node
	I0916 10:57:44.025283       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:57:44.025293       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	
	
	==> kube-apiserver [2b3270e2806d2a49ec7811927861d7e576fa169288c5b4860691e358f1febfa3] <==
	I0916 10:56:18.922412       1 remote_available_controller.go:411] Starting RemoteAvailability controller
	I0916 10:56:19.186202       1 cache.go:32] Waiting for caches to sync for RemoteAvailability controller
	I0916 10:56:19.186293       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:56:18.922472       1 crdregistration_controller.go:114] Starting crd-autoregister controller
	I0916 10:56:19.186556       1 shared_informer.go:313] Waiting for caches to sync for crd-autoregister
	I0916 10:56:19.186638       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:56:18.932147       1 controller.go:80] Starting OpenAPI V3 AggregationController
	I0916 10:56:19.186967       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:56:19.187053       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:56:19.187165       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:56:19.187287       1 cache.go:39] Caches are synced for autoregister controller
	I0916 10:56:18.932182       1 controller.go:119] Starting legacy_token_tracking_controller
	I0916 10:56:19.188914       1 shared_informer.go:313] Waiting for caches to sync for configmaps
	I0916 10:56:18.948080       1 apf_controller.go:377] Starting API Priority and Fairness config controller
	I0916 10:56:19.189172       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 10:56:19.189256       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:56:19.200114       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:56:19.205566       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	W0916 10:56:19.207042       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.3]
	I0916 10:56:19.211861       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:56:19.229471       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	E0916 10:56:19.255762       1 controller.go:95] Found stale data, removed previous endpoints on kubernetes service, apiserver didn't exit successfully previously
	I0916 10:56:19.290406       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:56:19.956328       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	W0916 10:56:20.768485       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2 192.168.49.3]
	
	
	==> kube-apiserver [fd48034050bae874c9e190debc0b1bfa138cdf53fc5887bd15f027e7346ca82d] <==
	E0916 10:55:45.688585       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.688596       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.688605       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.688949       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.688963       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.688973       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.688984       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.688993       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.689004       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.689014       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.689024       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.689030       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.689034       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.689051       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.704248       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.704786       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.704803       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.704814       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.704825       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.704836       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.704847       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.704857       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.704867       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.704879       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:55:45.704889       1 watcher.go:342] watch chan error: etcdserver: no leader
	
	
	==> kube-controller-manager [63b795aabed00b1c546bdfc8a236583357fe63341f1c55f86a8e1bf68afb7aee] <==
	I0916 10:57:11.800068       1 endpointslice_controller.go:344] "Error syncing endpoint slices for service, retrying" logger="endpointslice-controller" key="kube-system/kube-dns" err="failed to update kube-dns-kbqtc EndpointSlice for Service kube-system/kube-dns: Operation cannot be fulfilled on endpointslices.discovery.k8s.io \"kube-dns-kbqtc\": the object has been modified; please apply your changes to the latest version and try again"
	I0916 10:57:11.801008       1 event.go:377] Event(v1.ObjectReference{Kind:"Service", Namespace:"kube-system", Name:"kube-dns", UID:"2767a06f-f40b-42fe-bba8-c52234ffe17b", APIVersion:"v1", ResourceVersion:"245", FieldPath:""}): type: 'Warning' reason: 'FailedToUpdateEndpointSlices' Error updating Endpoint Slices for Service kube-system/kube-dns: failed to update kube-dns-kbqtc EndpointSlice for Service kube-system/kube-dns: Operation cannot be fulfilled on endpointslices.discovery.k8s.io "kube-dns-kbqtc": the object has been modified; please apply your changes to the latest version and try again
	I0916 10:57:11.919155       1 endpointslice_controller.go:344] "Error syncing endpoint slices for service, retrying" logger="endpointslice-controller" key="kube-system/kube-dns" err="failed to update kube-dns-kbqtc EndpointSlice for Service kube-system/kube-dns: Operation cannot be fulfilled on endpointslices.discovery.k8s.io \"kube-dns-kbqtc\": the object has been modified; please apply your changes to the latest version and try again"
	I0916 10:57:11.919441       1 event.go:377] Event(v1.ObjectReference{Kind:"Service", Namespace:"kube-system", Name:"kube-dns", UID:"2767a06f-f40b-42fe-bba8-c52234ffe17b", APIVersion:"v1", ResourceVersion:"245", FieldPath:""}): type: 'Warning' reason: 'FailedToUpdateEndpointSlices' Error updating Endpoint Slices for Service kube-system/kube-dns: failed to update kube-dns-kbqtc EndpointSlice for Service kube-system/kube-dns: Operation cannot be fulfilled on endpointslices.discovery.k8s.io "kube-dns-kbqtc": the object has been modified; please apply your changes to the latest version and try again
	I0916 10:57:11.924372       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="183.185059ms"
	E0916 10:57:11.924678       1 replica_set.go:560] "Unhandled Error" err="sync \"kube-system/coredns-7c65d6cfc9\" failed with Operation cannot be fulfilled on replicasets.apps \"coredns-7c65d6cfc9\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	I0916 10:57:11.930014       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="334.947µs"
	I0916 10:57:11.935294       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="142.178µs"
	I0916 10:57:32.962054       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:57:32.963438       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="ha-234759-m04"
	I0916 10:57:32.985902       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:57:35.523111       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:57:41.939510       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m03"
	I0916 10:57:41.950177       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m03"
	I0916 10:57:42.393560       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="380.098147ms"
	E0916 10:57:42.393854       1 replica_set.go:560] "Unhandled Error" err="sync \"default/busybox-7dff88458\" failed with Operation cannot be fulfilled on replicasets.apps \"busybox-7dff88458\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	I0916 10:57:42.438427       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="38.300406ms"
	I0916 10:57:42.523104       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="84.621504ms"
	I0916 10:57:42.523227       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="82.986µs"
	I0916 10:57:44.258033       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="48.976µs"
	I0916 10:57:45.271583       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="31.413745ms"
	I0916 10:57:45.271768       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="116.824µs"
	I0916 10:57:46.391801       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m03"
	I0916 10:57:46.393901       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="ha-234759-m04"
	E0916 10:57:46.574496       1 garbagecollector.go:399] "Unhandled Error" err="error syncing item &garbagecollector.node{identity:garbagecollector.objectReference{OwnerReference:v1.OwnerReference{APIVersion:\"storage.k8s.io/v1\", Kind:\"CSINode\", Name:\"ha-234759-m03\", UID:\"0a40139b-8a68-4e34-b51d-18b2a74252fd\", Controller:(*bool)(nil), BlockOwnerDeletion:(*bool)(nil)}, Namespace:\"\"}, dependentsLock:sync.RWMutex{w:sync.Mutex{state:0, sema:0x0}, writerSem:0x0, readerSem:0x0, readerCount:atomic.Int32{_:atomic.noCopy{}, v:1}, readerWait:atomic.Int32{_:atomic.noCopy{}, v:0}}, dependents:map[*garbagecollector.node]struct {}{}, deletingDependents:false, deletingDependentsLock:sync.RWMutex{w:sync.Mutex{state:0, sema:0x0}, writerSem:0x0, readerSem:0x0, readerCount:atomic.Int32{_:atomic.noCopy{}, v:0}, readerWait:atomic.Int32{_:atomic.noCopy{}, v:0}}, beingDeleted:false, beingDeletedLock:sync.RWMutex{w:sync.Mutex{state:0, sema:0x0}, writerSem:0x0, readerSem:0x0, readerCount:atomic.Int32{_:atomic.noCopy{}
, v:0}, readerWait:atomic.Int32{_:atomic.noCopy{}, v:0}}, virtual:false, virtualLock:sync.RWMutex{w:sync.Mutex{state:0, sema:0x0}, writerSem:0x0, readerSem:0x0, readerCount:atomic.Int32{_:atomic.noCopy{}, v:0}, readerWait:atomic.Int32{_:atomic.noCopy{}, v:0}}, owners:[]v1.OwnerReference{v1.OwnerReference{APIVersion:\"v1\", Kind:\"Node\", Name:\"ha-234759-m03\", UID:\"f54fa22f-9520-483c-a3ac-d5fc9a1607e6\", Controller:(*bool)(nil), BlockOwnerDeletion:(*bool)(nil)}}}: csinodes.storage.k8s.io \"ha-234759-m03\" not found" logger="UnhandledError"
	
	
	==> kube-controller-manager [a7002833ce71be5c884b6b01ea4b5b23ca5c4dbd6a84cfaeed6b4d3e9829e35b] <==
	I0916 10:53:23.794530       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="1.638185ms"
	I0916 10:53:24.744495       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="76.439µs"
	I0916 10:53:25.590578       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m03"
	I0916 10:53:41.904648       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m02"
	I0916 10:53:49.302045       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759"
	I0916 10:53:50.414741       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="58.307681ms"
	I0916 10:53:50.499441       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="84.648734ms"
	I0916 10:53:50.499782       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="65.657µs"
	I0916 10:53:56.200551       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m03"
	E0916 10:54:12.039568       1 certificate_controller.go:151] "Unhandled Error" err="Sync csr-q5rd8 failed with : error updating signature for csr: Operation cannot be fulfilled on certificatesigningrequests.certificates.k8s.io \"csr-q5rd8\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	I0916 10:54:12.223078       1 actual_state_of_world.go:540] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"ha-234759-m04\" does not exist"
	I0916 10:54:12.277872       1 range_allocator.go:422] "Set node PodCIDR" logger="node-ipam-controller" node="ha-234759-m04" podCIDRs=["10.244.3.0/24"]
	I0916 10:54:12.278590       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:12.278799       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:12.685159       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:13.216047       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:13.275364       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:13.275441       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="ha-234759-m04"
	I0916 10:54:13.299972       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:54:15.629918       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="ha-234759-m04"
	I0916 10:55:12.701175       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m02"
	I0916 10:55:20.819404       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="16.539827ms"
	I0916 10:55:20.819835       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="180.233µs"
	I0916 10:55:21.947403       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="31.241774ms"
	I0916 10:55:21.947708       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="65.017µs"
	
	
	==> kube-proxy [900d2ad5148fe65aefe93ce1d29763ab71494f94b5511bca347f273235ccc038] <==
	I0916 10:51:52.177892       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:51:52.290574       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:51:52.290638       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:51:52.352222       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:51:52.352350       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:51:52.354429       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:51:52.355024       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:51:52.355194       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:51:52.362196       1 config.go:199] "Starting service config controller"
	I0916 10:51:52.362316       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:51:52.362400       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:51:52.362458       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:51:52.363411       1 config.go:328] "Starting node config controller"
	I0916 10:51:52.365650       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:51:52.462531       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:51:52.462632       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:51:52.471949       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [e617f849070edf4c16b86d0792dd8cbf9f27eacef983569f922b9f407467c670] <==
	I0916 10:56:33.475454       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:56:33.972451       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:56:33.972524       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:56:34.173072       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:56:34.173415       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:56:34.179384       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:56:34.179904       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:56:34.186538       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:56:34.188077       1 config.go:199] "Starting service config controller"
	I0916 10:56:34.188310       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:56:34.189069       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:56:34.189177       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:56:34.189890       1 config.go:328] "Starting node config controller"
	I0916 10:56:34.190035       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:56:34.289961       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:56:34.290244       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:56:34.290649       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [5a7d53b11a05f48872cdd02a26e2074bdb1c6ee6e353ceb2ff9519faca117d67] <==
	E0916 10:53:20.488702       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-kjr9x\": pod busybox-7dff88458-kjr9x is already assigned to node \"ha-234759\"" pod="default/busybox-7dff88458-kjr9x"
	I0916 10:53:20.488719       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-7dff88458-kjr9x" node="ha-234759"
	E0916 10:53:20.489597       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-m9lsb\": pod busybox-7dff88458-m9lsb is already assigned to node \"ha-234759-m03\"" plugin="DefaultBinder" pod="default/busybox-7dff88458-m9lsb" node="ha-234759-m03"
	E0916 10:53:20.489638       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod af6e1f4d-184c-4d9d-bed7-b49448f6daa9(default/busybox-7dff88458-m9lsb) wasn't assumed so cannot be forgotten" pod="default/busybox-7dff88458-m9lsb"
	E0916 10:53:20.489651       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-m9lsb\": pod busybox-7dff88458-m9lsb is already assigned to node \"ha-234759-m03\"" pod="default/busybox-7dff88458-m9lsb"
	I0916 10:53:20.489677       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-7dff88458-m9lsb" node="ha-234759-m03"
	E0916 10:54:12.393673       1 schedule_one.go:953] "Scheduler cache AssumePod failed" err="pod 10919f4b-06e2-4ba9-8ed7-6a6493352be5(kube-system/kube-proxy-xscmm) is in the cache, so can't be assumed" pod="kube-system/kube-proxy-xscmm"
	E0916 10:54:12.393715       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="pod 10919f4b-06e2-4ba9-8ed7-6a6493352be5(kube-system/kube-proxy-xscmm) is in the cache, so can't be assumed" pod="kube-system/kube-proxy-xscmm"
	I0916 10:54:12.393736       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kube-proxy-xscmm" node="ha-234759-m04"
	E0916 10:54:12.419351       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kindnet-sk6c5\": pod kindnet-sk6c5 is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="kube-system/kindnet-sk6c5" node="ha-234759-m04"
	E0916 10:54:12.419403       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 021eac3e-1cf0-40c0-a4e8-6bfe73a62a75(kube-system/kindnet-sk6c5) wasn't assumed so cannot be forgotten" pod="kube-system/kindnet-sk6c5"
	E0916 10:54:12.419578       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kindnet-sk6c5\": pod kindnet-sk6c5 is already assigned to node \"ha-234759-m04\"" pod="kube-system/kindnet-sk6c5"
	I0916 10:54:12.419725       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kindnet-sk6c5" node="ha-234759-m04"
	E0916 10:54:12.434533       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kube-proxy-m84xg\": pod kube-proxy-m84xg is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="kube-system/kube-proxy-m84xg" node="ha-234759-m04"
	E0916 10:54:12.434587       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 6b84ade0-ae34-4c7e-b1ed-e9c83f4bf130(kube-system/kube-proxy-m84xg) wasn't assumed so cannot be forgotten" pod="kube-system/kube-proxy-m84xg"
	E0916 10:54:12.434606       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kube-proxy-m84xg\": pod kube-proxy-m84xg is already assigned to node \"ha-234759-m04\"" pod="kube-system/kube-proxy-m84xg"
	I0916 10:54:12.434653       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kube-proxy-m84xg" node="ha-234759-m04"
	E0916 10:54:12.656470       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kindnet-c59dr\": pod kindnet-c59dr is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="kube-system/kindnet-c59dr" node="ha-234759-m04"
	E0916 10:54:12.656573       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 929f6efd-2b9a-4b18-919d-36fc692d45c4(kube-system/kindnet-c59dr) wasn't assumed so cannot be forgotten" pod="kube-system/kindnet-c59dr"
	E0916 10:54:12.656663       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kindnet-c59dr\": pod kindnet-c59dr is already assigned to node \"ha-234759-m04\"" pod="kube-system/kindnet-c59dr"
	I0916 10:54:12.656746       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kindnet-c59dr" node="ha-234759-m04"
	E0916 10:54:12.678394       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kube-proxy-zcn6b\": pod kube-proxy-zcn6b is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="kube-system/kube-proxy-zcn6b" node="ha-234759-m04"
	E0916 10:54:12.678469       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod 66cac608-b205-4573-afb9-4e337fdadf3c(kube-system/kube-proxy-zcn6b) wasn't assumed so cannot be forgotten" pod="kube-system/kube-proxy-zcn6b"
	E0916 10:54:12.678502       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kube-proxy-zcn6b\": pod kube-proxy-zcn6b is already assigned to node \"ha-234759-m04\"" pod="kube-system/kube-proxy-zcn6b"
	I0916 10:54:12.678547       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kube-proxy-zcn6b" node="ha-234759-m04"
	
	
	==> kube-scheduler [d4bb975eec45b54011799a4101f9ba6709586b66878d5c3e3073998608e29857] <==
	I0916 10:56:17.913920       1 serving.go:386] Generated self-signed cert in-memory
	I0916 10:56:19.486557       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:56:19.486821       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:56:19.493410       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 10:56:19.493459       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 10:56:19.493514       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:56:19.493552       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:56:19.493570       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 10:56:19.493580       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:56:19.494208       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:56:19.494420       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:56:19.593680       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:56:19.593683       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 10:56:19.593707       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	E0916 10:57:42.130426       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-n5dcg\": pod busybox-7dff88458-n5dcg is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="default/busybox-7dff88458-n5dcg" node="ha-234759-m04"
	E0916 10:57:42.132308       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod d46d3e7a-cb65-4ad3-a038-da0384151c20(default/busybox-7dff88458-n5dcg) wasn't assumed so cannot be forgotten" pod="default/busybox-7dff88458-n5dcg"
	E0916 10:57:42.132381       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-n5dcg\": pod busybox-7dff88458-n5dcg is already assigned to node \"ha-234759-m04\"" pod="default/busybox-7dff88458-n5dcg"
	I0916 10:57:42.132403       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-7dff88458-n5dcg" node="ha-234759-m04"
	
	
	==> kubelet <==
	Sep 16 10:56:31 ha-234759 kubelet[677]: I0916 10:56:31.350983     677 apiserver.go:52] "Watching apiserver"
	Sep 16 10:56:31 ha-234759 kubelet[677]: I0916 10:56:31.357823     677 kubelet.go:1895] "Trying to delete pod" pod="kube-system/kube-vip-ha-234759" podUID="41a1ec5f-e3ae-4b06-9a3f-f76b54f7d24e"
	Sep 16 10:56:31 ha-234759 kubelet[677]: I0916 10:56:31.372599     677 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
	Sep 16 10:56:31 ha-234759 kubelet[677]: I0916 10:56:31.390498     677 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/8ea118a7-cc54-4dd9-8bb2-cfc133a376fc-xtables-lock\") pod \"kube-proxy-gwdl4\" (UID: \"8ea118a7-cc54-4dd9-8bb2-cfc133a376fc\") " pod="kube-system/kube-proxy-gwdl4"
	Sep 16 10:56:31 ha-234759 kubelet[677]: I0916 10:56:31.390540     677 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/8ea118a7-cc54-4dd9-8bb2-cfc133a376fc-lib-modules\") pod \"kube-proxy-gwdl4\" (UID: \"8ea118a7-cc54-4dd9-8bb2-cfc133a376fc\") " pod="kube-system/kube-proxy-gwdl4"
	Sep 16 10:56:31 ha-234759 kubelet[677]: I0916 10:56:31.390563     677 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/e8924914-9ba5-4adc-ac46-9d3d97b0bc08-tmp\") pod \"storage-provisioner\" (UID: \"e8924914-9ba5-4adc-ac46-9d3d97b0bc08\") " pod="kube-system/storage-provisioner"
	Sep 16 10:56:31 ha-234759 kubelet[677]: I0916 10:56:31.390788     677 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/ee79653f-8a9e-41e2-82bb-7b08fc87e265-xtables-lock\") pod \"kindnet-q8nl6\" (UID: \"ee79653f-8a9e-41e2-82bb-7b08fc87e265\") " pod="kube-system/kindnet-q8nl6"
	Sep 16 10:56:31 ha-234759 kubelet[677]: I0916 10:56:31.390814     677 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ee79653f-8a9e-41e2-82bb-7b08fc87e265-lib-modules\") pod \"kindnet-q8nl6\" (UID: \"ee79653f-8a9e-41e2-82bb-7b08fc87e265\") " pod="kube-system/kindnet-q8nl6"
	Sep 16 10:56:31 ha-234759 kubelet[677]: I0916 10:56:31.390846     677 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-cfg\" (UniqueName: \"kubernetes.io/host-path/ee79653f-8a9e-41e2-82bb-7b08fc87e265-cni-cfg\") pod \"kindnet-q8nl6\" (UID: \"ee79653f-8a9e-41e2-82bb-7b08fc87e265\") " pod="kube-system/kindnet-q8nl6"
	Sep 16 10:56:31 ha-234759 kubelet[677]: I0916 10:56:31.443429     677 kubelet.go:1900] "Deleted mirror pod because it is outdated" pod="kube-system/kube-vip-ha-234759"
	Sep 16 10:56:31 ha-234759 kubelet[677]: I0916 10:56:31.482131     677 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 10:56:31 ha-234759 kubelet[677]: I0916 10:56:31.638552     677 kubelet.go:1895] "Trying to delete pod" pod="kube-system/kube-vip-ha-234759" podUID="41a1ec5f-e3ae-4b06-9a3f-f76b54f7d24e"
	Sep 16 10:56:32 ha-234759 kubelet[677]: I0916 10:56:32.334081     677 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-vip-ha-234759" podStartSLOduration=1.33405818 podStartE2EDuration="1.33405818s" podCreationTimestamp="2024-09-16 10:56:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 10:56:32.304396531 +0000 UTC m=+28.082448275" watchObservedRunningTime="2024-09-16 10:56:32.33405818 +0000 UTC m=+28.112109932"
	Sep 16 10:56:32 ha-234759 kubelet[677]: I0916 10:56:32.408551     677 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="307d3d209e030cc25b0d493bb59102a5" path="/var/lib/kubelet/pods/307d3d209e030cc25b0d493bb59102a5/volumes"
	Sep 16 10:56:34 ha-234759 kubelet[677]: E0916 10:56:34.632235     677 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 10:56:34 ha-234759 kubelet[677]: E0916 10:56:34.632292     677 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 10:56:44 ha-234759 kubelet[677]: E0916 10:56:44.657310     677 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 10:56:44 ha-234759 kubelet[677]: E0916 10:56:44.657356     677 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 10:56:54 ha-234759 kubelet[677]: E0916 10:56:54.681771     677 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 10:56:54 ha-234759 kubelet[677]: E0916 10:56:54.681817     677 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 10:57:04 ha-234759 kubelet[677]: I0916 10:57:04.069241     677 scope.go:117] "RemoveContainer" containerID="2586d6167e7558670ba282e8e630c6301933dfdf0302bef7718de8fada959378"
	Sep 16 10:57:04 ha-234759 kubelet[677]: I0916 10:57:04.069824     677 scope.go:117] "RemoveContainer" containerID="ad8481816fac562f3dc8702419401da1af3f9cc51ce398f6c059a1d447126faf"
	Sep 16 10:57:04 ha-234759 kubelet[677]: E0916 10:57:04.070077     677 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"storage-provisioner\" with CrashLoopBackOff: \"back-off 10s restarting failed container=storage-provisioner pod=storage-provisioner_kube-system(e8924914-9ba5-4adc-ac46-9d3d97b0bc08)\"" pod="kube-system/storage-provisioner" podUID="e8924914-9ba5-4adc-ac46-9d3d97b0bc08"
	Sep 16 10:57:04 ha-234759 kubelet[677]: I0916 10:57:04.417076     677 scope.go:117] "RemoveContainer" containerID="36da5a0fb370e723326a9743b110be293f982a4028eb1a1d81cc396f52ee1ec8"
	Sep 16 10:57:15 ha-234759 kubelet[677]: I0916 10:57:15.404508     677 scope.go:117] "RemoveContainer" containerID="ad8481816fac562f3dc8702419401da1af3f9cc51ce398f6c059a1d447126faf"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p ha-234759 -n ha-234759
helpers_test.go:261: (dbg) Run:  kubectl --context ha-234759 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context ha-234759 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (598.273µs)
helpers_test.go:263: kubectl --context ha-234759 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiControlPlane/serial/DeleteSecondaryNode (13.23s)

                                                
                                    
x
+
TestMultiControlPlane/serial/RestartCluster (81.26s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/RestartCluster
ha_test.go:560: (dbg) Run:  out/minikube-linux-arm64 start -p ha-234759 --wait=true -v=7 --alsologtostderr --driver=docker  --container-runtime=containerd
E0916 10:58:33.956498 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:59:07.672440 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:59:35.374474 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:560: (dbg) Done: out/minikube-linux-arm64 start -p ha-234759 --wait=true -v=7 --alsologtostderr --driver=docker  --container-runtime=containerd: (1m17.302535185s)
ha_test.go:566: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 status -v=7 --alsologtostderr
ha_test.go:584: (dbg) Run:  kubectl get nodes
ha_test.go:584: (dbg) Non-zero exit: kubectl get nodes: fork/exec /usr/local/bin/kubectl: exec format error (560.818µs)
ha_test.go:586: failed to run kubectl get nodes. args "kubectl get nodes" : fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiControlPlane/serial/RestartCluster]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect ha-234759
helpers_test.go:235: (dbg) docker inspect ha-234759:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59",
	        "Created": "2024-09-16T10:51:26.447161448Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2131840,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T10:58:31.553623236Z",
	            "FinishedAt": "2024-09-16T10:58:30.680802761Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/hostname",
	        "HostsPath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/hosts",
	        "LogPath": "/var/lib/docker/containers/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59-json.log",
	        "Name": "/ha-234759",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "ha-234759:/var",
	                "/lib/modules:/lib/modules:ro"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "ha-234759",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613/merged",
	                "UpperDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613/diff",
	                "WorkDir": "/var/lib/docker/overlay2/6c0f49aa358da38b0b2b4bf0b93c9292e0b7ff0cbfd2932f57d7d7a0cb64a613/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "volume",
	                "Name": "ha-234759",
	                "Source": "/var/lib/docker/volumes/ha-234759/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            },
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            }
	        ],
	        "Config": {
	            "Hostname": "ha-234759",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "ha-234759",
	                "name.minikube.sigs.k8s.io": "ha-234759",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "bd8e24de22b1ee60351c9f68a8aa16f59d6936617e76a10ca1ff916880c9a847",
	            "SandboxKey": "/var/run/docker/netns/bd8e24de22b1",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40642"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40643"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40646"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40644"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40645"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "ha-234759": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.49.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:31:02",
	                    "DriverOpts": null,
	                    "NetworkID": "941929ec13d1e6034904933d29100a93cf04d9e6a30844d8d0c54e3a464c32cd",
	                    "EndpointID": "2bbb1255731535a390fd7130098c71e1668dea9cc65a4967a9b3b5a7516f61d6",
	                    "Gateway": "192.168.49.1",
	                    "IPAddress": "192.168.49.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "ha-234759",
	                        "6306ac5a5985"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p ha-234759 -n ha-234759
helpers_test.go:244: <<< TestMultiControlPlane/serial/RestartCluster FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiControlPlane/serial/RestartCluster]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p ha-234759 logs -n 25: (2.151543765s)
helpers_test.go:252: TestMultiControlPlane/serial/RestartCluster logs: 
-- stdout --
	
	==> Audit <==
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	| Command |                                       Args                                       |  Profile  |  User   | Version |     Start Time      |      End Time       |
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	| cp      | ha-234759 cp ha-234759-m03:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04:/home/docker/cp-test_ha-234759-m03_ha-234759-m04.txt               |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m03 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759-m04 sudo cat                                          | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m03_ha-234759-m04.txt                             |           |         |         |                     |                     |
	| cp      | ha-234759 cp testdata/cp-test.txt                                                | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04:/home/docker/cp-test.txt                                           |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /tmp/TestMultiControlPlaneserialCopyFile3470256434/001/cp-test_ha-234759-m04.txt |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759:/home/docker/cp-test_ha-234759-m04_ha-234759.txt                       |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759 sudo cat                                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m04_ha-234759.txt                                 |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m02:/home/docker/cp-test_ha-234759-m04_ha-234759-m02.txt               |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759-m02 sudo cat                                          | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m04_ha-234759-m02.txt                             |           |         |         |                     |                     |
	| cp      | ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m03:/home/docker/cp-test_ha-234759-m04_ha-234759-m03.txt               |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n                                                                 | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | ha-234759-m04 sudo cat                                                           |           |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                         |           |         |         |                     |                     |
	| ssh     | ha-234759 ssh -n ha-234759-m03 sudo cat                                          | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | /home/docker/cp-test_ha-234759-m04_ha-234759-m03.txt                             |           |         |         |                     |                     |
	| node    | ha-234759 node stop m02 -v=7                                                     | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:54 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | ha-234759 node start m02 -v=7                                                    | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:54 UTC | 16 Sep 24 10:55 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | list -p ha-234759 -v=7                                                           | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC |                     |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| stop    | -p ha-234759 -v=7                                                                | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:55 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| start   | -p ha-234759 --wait=true -v=7                                                    | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:55 UTC | 16 Sep 24 10:57 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| node    | list -p ha-234759                                                                | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:57 UTC |                     |
	| node    | ha-234759 node delete m03 -v=7                                                   | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:57 UTC | 16 Sep 24 10:57 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| stop    | ha-234759 stop -v=7                                                              | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:57 UTC | 16 Sep 24 10:58 UTC |
	|         | --alsologtostderr                                                                |           |         |         |                     |                     |
	| start   | -p ha-234759 --wait=true                                                         | ha-234759 | jenkins | v1.34.0 | 16 Sep 24 10:58 UTC | 16 Sep 24 10:59 UTC |
	|         | -v=7 --alsologtostderr                                                           |           |         |         |                     |                     |
	|         | --driver=docker                                                                  |           |         |         |                     |                     |
	|         | --container-runtime=containerd                                                   |           |         |         |                     |                     |
	|---------|----------------------------------------------------------------------------------|-----------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:58:31
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:58:31.099993 2131642 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:58:31.100189 2131642 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:58:31.100199 2131642 out.go:358] Setting ErrFile to fd 2...
	I0916 10:58:31.100204 2131642 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:58:31.100518 2131642 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:58:31.100922 2131642 out.go:352] Setting JSON to false
	I0916 10:58:31.101924 2131642 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":139253,"bootTime":1726345058,"procs":189,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:58:31.102004 2131642 start.go:139] virtualization:  
	I0916 10:58:31.104724 2131642 out.go:177] * [ha-234759] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:58:31.106892 2131642 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:58:31.107039 2131642 notify.go:220] Checking for updates...
	I0916 10:58:31.110485 2131642 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:58:31.112504 2131642 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:58:31.114432 2131642 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:58:31.116554 2131642 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:58:31.118744 2131642 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:58:31.121443 2131642 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:58:31.121995 2131642 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:58:31.143384 2131642 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:58:31.143510 2131642 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:58:31.201809 2131642 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:5 ContainersRunning:2 ContainersPaused:0 ContainersStopped:3 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:57 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 10:58:31.19177523 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarc
h64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerError
s:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:58:31.201934 2131642 docker.go:318] overlay module found
	I0916 10:58:31.204475 2131642 out.go:177] * Using the docker driver based on existing profile
	I0916 10:58:31.206915 2131642 start.go:297] selected driver: docker
	I0916 10:58:31.206935 2131642 start.go:901] validating driver "docker" against &{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName
:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime: ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false
kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: Socket
VMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:58:31.207090 2131642 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:58:31.207208 2131642 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:58:31.259675 2131642 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:5 ContainersRunning:2 ContainersPaused:0 ContainersStopped:3 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:57 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 10:58:31.250191425 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:58:31.260084 2131642 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:58:31.260120 2131642 cni.go:84] Creating CNI manager for ""
	I0916 10:58:31.260185 2131642 cni.go:136] multinode detected (3 nodes found), recommending kindnet
	I0916 10:58:31.260244 2131642 start.go:340] cluster config:
	{Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-se
rver:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0
GPUs: AutoPauseInterval:1m0s}
	I0916 10:58:31.263215 2131642 out.go:177] * Starting "ha-234759" primary control-plane node in "ha-234759" cluster
	I0916 10:58:31.265541 2131642 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:58:31.268045 2131642 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:58:31.270524 2131642 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:58:31.270582 2131642 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 10:58:31.270595 2131642 cache.go:56] Caching tarball of preloaded images
	I0916 10:58:31.270617 2131642 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:58:31.270720 2131642 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:58:31.270733 2131642 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:58:31.270898 2131642 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	W0916 10:58:31.289893 2131642 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:58:31.289918 2131642 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:58:31.289997 2131642 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:58:31.290021 2131642 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:58:31.290026 2131642 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:58:31.290034 2131642 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:58:31.290046 2131642 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:58:31.291453 2131642 image.go:273] response: 
	I0916 10:58:31.411528 2131642 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:58:31.411567 2131642 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:58:31.411597 2131642 start.go:360] acquireMachinesLock for ha-234759: {Name:mk07434fa5fb218c324ac4567510c65c6e772f63 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:58:31.411675 2131642 start.go:364] duration metric: took 43.397µs to acquireMachinesLock for "ha-234759"
	I0916 10:58:31.411699 2131642 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:58:31.411707 2131642 fix.go:54] fixHost starting: 
	I0916 10:58:31.411987 2131642 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:58:31.428562 2131642 fix.go:112] recreateIfNeeded on ha-234759: state=Stopped err=<nil>
	W0916 10:58:31.428592 2131642 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:58:31.431823 2131642 out.go:177] * Restarting existing docker container for "ha-234759" ...
	I0916 10:58:31.434167 2131642 cli_runner.go:164] Run: docker start ha-234759
	I0916 10:58:31.756533 2131642 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:58:31.779858 2131642 kic.go:430] container "ha-234759" state is running.
	I0916 10:58:31.780270 2131642 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759
	I0916 10:58:31.801458 2131642 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:58:31.801704 2131642 machine.go:93] provisionDockerMachine start ...
	I0916 10:58:31.801772 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:58:31.823798 2131642 main.go:141] libmachine: Using SSH client type: native
	I0916 10:58:31.824049 2131642 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40642 <nil> <nil>}
	I0916 10:58:31.824061 2131642 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:58:31.824778 2131642 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 10:58:34.962194 2131642 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759
	
	I0916 10:58:34.962238 2131642 ubuntu.go:169] provisioning hostname "ha-234759"
	I0916 10:58:34.962304 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:58:34.979618 2131642 main.go:141] libmachine: Using SSH client type: native
	I0916 10:58:34.979870 2131642 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40642 <nil> <nil>}
	I0916 10:58:34.979886 2131642 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-234759 && echo "ha-234759" | sudo tee /etc/hostname
	I0916 10:58:35.131309 2131642 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759
	
	I0916 10:58:35.131412 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:58:35.149459 2131642 main.go:141] libmachine: Using SSH client type: native
	I0916 10:58:35.149749 2131642 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40642 <nil> <nil>}
	I0916 10:58:35.149774 2131642 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-234759' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-234759/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-234759' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:58:35.286670 2131642 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:58:35.286731 2131642 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:58:35.286752 2131642 ubuntu.go:177] setting up certificates
	I0916 10:58:35.286764 2131642 provision.go:84] configureAuth start
	I0916 10:58:35.286830 2131642 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759
	I0916 10:58:35.303545 2131642 provision.go:143] copyHostCerts
	I0916 10:58:35.303589 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:58:35.303626 2131642 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:58:35.303637 2131642 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:58:35.303716 2131642 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:58:35.303802 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:58:35.303824 2131642 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:58:35.303829 2131642 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:58:35.303861 2131642 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:58:35.303907 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:58:35.303927 2131642 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:58:35.303935 2131642 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:58:35.303962 2131642 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:58:35.304020 2131642 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.ha-234759 san=[127.0.0.1 192.168.49.2 ha-234759 localhost minikube]
	I0916 10:58:35.954265 2131642 provision.go:177] copyRemoteCerts
	I0916 10:58:35.954337 2131642 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:58:35.954377 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:58:35.970716 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40642 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:58:36.068274 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:58:36.068346 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:58:36.093903 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:58:36.093977 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1200 bytes)
	I0916 10:58:36.119611 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:58:36.119701 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:58:36.144733 2131642 provision.go:87] duration metric: took 857.940775ms to configureAuth
	I0916 10:58:36.144763 2131642 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:58:36.145061 2131642 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:58:36.145075 2131642 machine.go:96] duration metric: took 4.34335596s to provisionDockerMachine
	I0916 10:58:36.145085 2131642 start.go:293] postStartSetup for "ha-234759" (driver="docker")
	I0916 10:58:36.145101 2131642 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:58:36.145162 2131642 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:58:36.145206 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:58:36.161583 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40642 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:58:36.263681 2131642 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:58:36.266816 2131642 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:58:36.266851 2131642 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:58:36.266862 2131642 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:58:36.266869 2131642 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:58:36.266879 2131642 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:58:36.266937 2131642 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:58:36.267012 2131642 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:58:36.267019 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:58:36.267124 2131642 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:58:36.275611 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:58:36.301022 2131642 start.go:296] duration metric: took 155.917441ms for postStartSetup
	I0916 10:58:36.301126 2131642 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:58:36.301174 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:58:36.317598 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40642 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:58:36.411673 2131642 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:58:36.416231 2131642 fix.go:56] duration metric: took 5.004515927s for fixHost
	I0916 10:58:36.416258 2131642 start.go:83] releasing machines lock for "ha-234759", held for 5.004570943s
	I0916 10:58:36.416327 2131642 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759
	I0916 10:58:36.432833 2131642 ssh_runner.go:195] Run: cat /version.json
	I0916 10:58:36.432898 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:58:36.432956 2131642 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:58:36.433042 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:58:36.458086 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40642 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:58:36.458853 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40642 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:58:36.555360 2131642 ssh_runner.go:195] Run: systemctl --version
	I0916 10:58:36.680588 2131642 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:58:36.685132 2131642 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:58:36.703379 2131642 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:58:36.703481 2131642 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:58:36.712459 2131642 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:58:36.712490 2131642 start.go:495] detecting cgroup driver to use...
	I0916 10:58:36.712524 2131642 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:58:36.712577 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:58:36.727467 2131642 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:58:36.740385 2131642 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:58:36.740455 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:58:36.754254 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:58:36.766223 2131642 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:58:36.847233 2131642 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:58:36.927615 2131642 docker.go:233] disabling docker service ...
	I0916 10:58:36.927686 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:58:36.940823 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:58:36.952905 2131642 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:58:37.041617 2131642 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:58:37.123591 2131642 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:58:37.135602 2131642 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:58:37.152546 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:58:37.163146 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:58:37.173143 2131642 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:58:37.173327 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:58:37.183465 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:58:37.193724 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:58:37.203915 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:58:37.213838 2131642 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:58:37.223319 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:58:37.233591 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:58:37.243634 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:58:37.254281 2131642 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:58:37.263314 2131642 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:58:37.272125 2131642 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:58:37.351945 2131642 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:58:37.547240 2131642 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:58:37.547322 2131642 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:58:37.551665 2131642 start.go:563] Will wait 60s for crictl version
	I0916 10:58:37.551742 2131642 ssh_runner.go:195] Run: which crictl
	I0916 10:58:37.555583 2131642 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:58:37.595910 2131642 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:58:37.596030 2131642 ssh_runner.go:195] Run: containerd --version
	I0916 10:58:37.620161 2131642 ssh_runner.go:195] Run: containerd --version
	I0916 10:58:37.652672 2131642 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:58:37.655275 2131642 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:58:37.670188 2131642 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:58:37.673933 2131642 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:58:37.685482 2131642 kubeadm.go:883] updating cluster {Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APISe
rverNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflo
w:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetCl
ientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 10:58:37.685641 2131642 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:58:37.685710 2131642 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:58:37.723308 2131642 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:58:37.723334 2131642 containerd.go:534] Images already preloaded, skipping extraction
	I0916 10:58:37.723397 2131642 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 10:58:37.759366 2131642 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 10:58:37.759387 2131642 cache_images.go:84] Images are preloaded, skipping loading
	I0916 10:58:37.759396 2131642 kubeadm.go:934] updating node { 192.168.49.2 8443 v1.31.1 containerd true true} ...
	I0916 10:58:37.759504 2131642 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-234759 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:58:37.759575 2131642 ssh_runner.go:195] Run: sudo crictl info
	I0916 10:58:37.800913 2131642 cni.go:84] Creating CNI manager for ""
	I0916 10:58:37.800934 2131642 cni.go:136] multinode detected (3 nodes found), recommending kindnet
	I0916 10:58:37.800944 2131642 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 10:58:37.800968 2131642 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.49.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:ha-234759 NodeName:ha-234759 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.49.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.49.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kuberne
tes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 10:58:37.801106 2131642 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.49.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "ha-234759"
	  kubeletExtraArgs:
	    node-ip: 192.168.49.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.49.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 10:58:37.801126 2131642 kube-vip.go:115] generating kube-vip config ...
	I0916 10:58:37.801176 2131642 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:58:37.814021 2131642 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:58:37.814143 2131642 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
	I0916 10:58:37.814212 2131642 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:58:37.823492 2131642 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:58:37.823568 2131642 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube /etc/kubernetes/manifests
	I0916 10:58:37.832139 2131642 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (313 bytes)
	I0916 10:58:37.850852 2131642 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:58:37.869023 2131642 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2163 bytes)
	I0916 10:58:37.887636 2131642 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:58:37.906025 2131642 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:58:37.909852 2131642 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:58:37.920772 2131642 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:58:38.015605 2131642 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:58:38.034245 2131642 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759 for IP: 192.168.49.2
	I0916 10:58:38.034271 2131642 certs.go:194] generating shared ca certs ...
	I0916 10:58:38.034289 2131642 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:58:38.034439 2131642 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:58:38.034492 2131642 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:58:38.034505 2131642 certs.go:256] generating profile certs ...
	I0916 10:58:38.034631 2131642 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key
	I0916 10:58:38.034667 2131642 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.bc2ee994
	I0916 10:58:38.034771 2131642 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.bc2ee994 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.49.2 192.168.49.3 192.168.49.254]
	I0916 10:58:38.419839 2131642 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.bc2ee994 ...
	I0916 10:58:38.419870 2131642 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.bc2ee994: {Name:mk2d6b6012e0c0783114e6d5770b0514b986ef2d Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:58:38.420049 2131642 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.bc2ee994 ...
	I0916 10:58:38.420068 2131642 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.bc2ee994: {Name:mk2a1b72307ada0b7e8b25cdcbd3490476a4e822 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:58:38.420148 2131642 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt.bc2ee994 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt
	I0916 10:58:38.420298 2131642 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.bc2ee994 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key
	I0916 10:58:38.420436 2131642 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key
	I0916 10:58:38.420455 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:58:38.420471 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:58:38.420491 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:58:38.420504 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:58:38.420521 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:58:38.420537 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:58:38.420554 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:58:38.420573 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:58:38.420624 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:58:38.420658 2131642 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:58:38.420678 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:58:38.420710 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:58:38.420739 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:58:38.420766 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:58:38.420823 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:58:38.420859 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:58:38.420877 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:58:38.420896 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:58:38.421537 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:58:38.454439 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:58:38.480115 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:58:38.505711 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:58:38.530283 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1432 bytes)
	I0916 10:58:38.555393 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:58:38.580343 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:58:38.607575 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 10:58:38.633560 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:58:38.659297 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:58:38.683996 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:58:38.709424 2131642 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 10:58:38.727498 2131642 ssh_runner.go:195] Run: openssl version
	I0916 10:58:38.732931 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:58:38.742351 2131642 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:58:38.745980 2131642 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:58:38.746093 2131642 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:58:38.753373 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:58:38.762784 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:58:38.772075 2131642 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:58:38.775494 2131642 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:58:38.775561 2131642 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:58:38.782813 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:58:38.791605 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:58:38.800936 2131642 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:58:38.804556 2131642 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:58:38.804626 2131642 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:58:38.811347 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:58:38.820441 2131642 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:58:38.824190 2131642 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:58:38.830843 2131642 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:58:38.837831 2131642 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:58:38.844732 2131642 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:58:38.852220 2131642 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:58:38.859164 2131642 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:58:38.866167 2131642 kubeadm.go:392] StartCluster: {Name:ha-234759 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServe
rNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:f
alse kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClien
tPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:58:38.866314 2131642 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 10:58:38.866377 2131642 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 10:58:38.904278 2131642 cri.go:89] found id: "35d6358938dea6788cef8042038c69dcd63a9de42f599cfc4b7797f2edc50f67"
	I0916 10:58:38.904300 2131642 cri.go:89] found id: "41e6b84968e22184a2af0ae6cec735fddcf84294f353b02cf1533037a27300a2"
	I0916 10:58:38.904305 2131642 cri.go:89] found id: "d108cfac8fd4ff4eeef092b8bd57d5611a4f5a1e0f6ff03b515dd66901aea1da"
	I0916 10:58:38.904309 2131642 cri.go:89] found id: "9806835e6e060a0abfcf806d43a4b9a66b69879aff1b10ccf545e0ee3642321a"
	I0916 10:58:38.904313 2131642 cri.go:89] found id: "e617f849070edf4c16b86d0792dd8cbf9f27eacef983569f922b9f407467c670"
	I0916 10:58:38.904317 2131642 cri.go:89] found id: "d4bb975eec45b54011799a4101f9ba6709586b66878d5c3e3073998608e29857"
	I0916 10:58:38.904320 2131642 cri.go:89] found id: "bfcc891f8e795a1ce022ed84cfb8c02c482eb0468c843c701dbafdac0e3d2a95"
	I0916 10:58:38.904323 2131642 cri.go:89] found id: "b43574d882fbc3d7cc3c3eb1f0448a517af58314b6d7ebfedb1cf5aeac8827c8"
	I0916 10:58:38.904326 2131642 cri.go:89] found id: "63b795aabed00b1c546bdfc8a236583357fe63341f1c55f86a8e1bf68afb7aee"
	I0916 10:58:38.904332 2131642 cri.go:89] found id: "2b3270e2806d2a49ec7811927861d7e576fa169288c5b4860691e358f1febfa3"
	I0916 10:58:38.904336 2131642 cri.go:89] found id: ""
	I0916 10:58:38.904391 2131642 ssh_runner.go:195] Run: sudo runc --root /run/containerd/runc/k8s.io list -f json
	I0916 10:58:38.917081 2131642 cri.go:116] JSON = null
	W0916 10:58:38.917130 2131642 kubeadm.go:399] unpause failed: list paused: list returned 0 containers, but ps returned 10
	I0916 10:58:38.917192 2131642 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 10:58:38.926592 2131642 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 10:58:38.926665 2131642 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 10:58:38.926741 2131642 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 10:58:38.935231 2131642 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 10:58:38.935706 2131642 kubeconfig.go:47] verify endpoint returned: get endpoint: "ha-234759" does not appear in /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:58:38.935813 2131642 kubeconfig.go:62] /home/jenkins/minikube-integration/19651-2057935/kubeconfig needs updating (will repair): [kubeconfig missing "ha-234759" cluster setting kubeconfig missing "ha-234759" context setting]
	I0916 10:58:38.936081 2131642 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:58:38.936497 2131642 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:58:38.936746 2131642 kapi.go:59] client config for ha-234759: &rest.Config{Host:"https://192.168.49.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(
nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 10:58:38.937404 2131642 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 10:58:38.937487 2131642 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 10:58:38.946011 2131642 kubeadm.go:630] The running cluster does not require reconfiguration: 192.168.49.2
	I0916 10:58:38.946073 2131642 kubeadm.go:597] duration metric: took 19.380506ms to restartPrimaryControlPlane
	I0916 10:58:38.946093 2131642 kubeadm.go:394] duration metric: took 79.936276ms to StartCluster
	I0916 10:58:38.946110 2131642 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:58:38.946175 2131642 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:58:38.946857 2131642 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:58:38.947076 2131642 start.go:233] HA (multi-control plane) cluster: will skip waiting for primary control-plane node &{Name: IP:192.168.49.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:58:38.947102 2131642 start.go:241] waiting for startup goroutines ...
	I0916 10:58:38.947110 2131642 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 10:58:38.947580 2131642 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:58:38.953995 2131642 out.go:177] * Enabled addons: 
	I0916 10:58:38.955658 2131642 addons.go:510] duration metric: took 8.535082ms for enable addons: enabled=[]
	I0916 10:58:38.955694 2131642 start.go:246] waiting for cluster config update ...
	I0916 10:58:38.955704 2131642 start.go:255] writing updated cluster config ...
	I0916 10:58:38.957766 2131642 out.go:201] 
	I0916 10:58:38.959759 2131642 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:58:38.959893 2131642 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:58:38.962304 2131642 out.go:177] * Starting "ha-234759-m02" control-plane node in "ha-234759" cluster
	I0916 10:58:38.964333 2131642 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:58:38.966566 2131642 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:58:38.968689 2131642 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:58:38.968710 2131642 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:58:38.968719 2131642 cache.go:56] Caching tarball of preloaded images
	I0916 10:58:38.968894 2131642 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:58:38.968914 2131642 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:58:38.969044 2131642 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	W0916 10:58:38.989608 2131642 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:58:38.989630 2131642 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:58:38.989714 2131642 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:58:38.989736 2131642 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:58:38.989744 2131642 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:58:38.989752 2131642 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:58:38.989758 2131642 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:58:38.991083 2131642 image.go:273] response: 
	I0916 10:58:39.119741 2131642 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:58:39.119781 2131642 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:58:39.119812 2131642 start.go:360] acquireMachinesLock for ha-234759-m02: {Name:mk8d038416b8f502330f7520e1c7f720d49da587 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:58:39.119883 2131642 start.go:364] duration metric: took 47.581µs to acquireMachinesLock for "ha-234759-m02"
	I0916 10:58:39.119909 2131642 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:58:39.119918 2131642 fix.go:54] fixHost starting: m02
	I0916 10:58:39.120227 2131642 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Status}}
	I0916 10:58:39.137372 2131642 fix.go:112] recreateIfNeeded on ha-234759-m02: state=Stopped err=<nil>
	W0916 10:58:39.137399 2131642 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:58:39.140290 2131642 out.go:177] * Restarting existing docker container for "ha-234759-m02" ...
	I0916 10:58:39.142955 2131642 cli_runner.go:164] Run: docker start ha-234759-m02
	I0916 10:58:39.442727 2131642 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Status}}
	I0916 10:58:39.466428 2131642 kic.go:430] container "ha-234759-m02" state is running.
	I0916 10:58:39.466831 2131642 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m02
	I0916 10:58:39.486941 2131642 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:58:39.487277 2131642 machine.go:93] provisionDockerMachine start ...
	I0916 10:58:39.487343 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:58:39.510882 2131642 main.go:141] libmachine: Using SSH client type: native
	I0916 10:58:39.511134 2131642 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40647 <nil> <nil>}
	I0916 10:58:39.511145 2131642 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:58:39.511908 2131642 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 10:58:42.694220 2131642 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m02
	
	I0916 10:58:42.694246 2131642 ubuntu.go:169] provisioning hostname "ha-234759-m02"
	I0916 10:58:42.694311 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:58:42.742953 2131642 main.go:141] libmachine: Using SSH client type: native
	I0916 10:58:42.743204 2131642 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40647 <nil> <nil>}
	I0916 10:58:42.743222 2131642 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-234759-m02 && echo "ha-234759-m02" | sudo tee /etc/hostname
	I0916 10:58:43.003276 2131642 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m02
	
	I0916 10:58:43.003385 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:58:43.023294 2131642 main.go:141] libmachine: Using SSH client type: native
	I0916 10:58:43.023619 2131642 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40647 <nil> <nil>}
	I0916 10:58:43.023668 2131642 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-234759-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-234759-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-234759-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:58:43.206964 2131642 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:58:43.207043 2131642 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:58:43.207091 2131642 ubuntu.go:177] setting up certificates
	I0916 10:58:43.207132 2131642 provision.go:84] configureAuth start
	I0916 10:58:43.207226 2131642 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m02
	I0916 10:58:43.236300 2131642 provision.go:143] copyHostCerts
	I0916 10:58:43.236341 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:58:43.236379 2131642 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:58:43.236386 2131642 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:58:43.236461 2131642 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:58:43.236544 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:58:43.236560 2131642 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:58:43.236564 2131642 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:58:43.236591 2131642 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:58:43.236638 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:58:43.236654 2131642 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:58:43.236658 2131642 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:58:43.236681 2131642 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:58:43.236742 2131642 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.ha-234759-m02 san=[127.0.0.1 192.168.49.3 ha-234759-m02 localhost minikube]
	I0916 10:58:43.670151 2131642 provision.go:177] copyRemoteCerts
	I0916 10:58:43.670275 2131642 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:58:43.670350 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:58:43.687613 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40647 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:58:43.794840 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:58:43.794907 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:58:43.853557 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:58:43.853616 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 10:58:43.891330 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:58:43.891437 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:58:43.953515 2131642 provision.go:87] duration metric: took 746.326546ms to configureAuth
	I0916 10:58:43.953610 2131642 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:58:43.953892 2131642 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:58:43.953932 2131642 machine.go:96] duration metric: took 4.466642484s to provisionDockerMachine
	I0916 10:58:43.953956 2131642 start.go:293] postStartSetup for "ha-234759-m02" (driver="docker")
	I0916 10:58:43.953978 2131642 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:58:43.954059 2131642 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:58:43.954130 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:58:44.004842 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40647 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:58:44.133047 2131642 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:58:44.136638 2131642 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:58:44.136671 2131642 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:58:44.136681 2131642 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:58:44.136688 2131642 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:58:44.136698 2131642 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:58:44.136755 2131642 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:58:44.136829 2131642 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:58:44.136837 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:58:44.136972 2131642 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:58:44.157121 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:58:44.196267 2131642 start.go:296] duration metric: took 242.284601ms for postStartSetup
	I0916 10:58:44.196398 2131642 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:58:44.196482 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:58:44.224289 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40647 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:58:44.335121 2131642 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:58:44.340978 2131642 fix.go:56] duration metric: took 5.221044056s for fixHost
	I0916 10:58:44.341007 2131642 start.go:83] releasing machines lock for "ha-234759-m02", held for 5.221106292s
	I0916 10:58:44.341079 2131642 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m02
	I0916 10:58:44.382552 2131642 out.go:177] * Found network options:
	I0916 10:58:44.385210 2131642 out.go:177]   - NO_PROXY=192.168.49.2
	W0916 10:58:44.388006 2131642 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:58:44.388042 2131642 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:58:44.388115 2131642 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:58:44.388162 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:58:44.388426 2131642 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:58:44.388480 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m02
	I0916 10:58:44.426843 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40647 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:58:44.444404 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40647 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m02/id_rsa Username:docker}
	I0916 10:58:44.565126 2131642 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:58:44.932369 2131642 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:58:44.932456 2131642 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:58:44.970995 2131642 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:58:44.971026 2131642 start.go:495] detecting cgroup driver to use...
	I0916 10:58:44.971060 2131642 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:58:44.971115 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:58:45.044489 2131642 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:58:45.110728 2131642 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:58:45.110808 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:58:45.164371 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:58:45.228397 2131642 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:58:45.588082 2131642 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:58:45.903955 2131642 docker.go:233] disabling docker service ...
	I0916 10:58:45.904030 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:58:45.964551 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:58:46.016033 2131642 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:58:46.299214 2131642 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:58:46.596912 2131642 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:58:46.637473 2131642 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:58:46.702764 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:58:46.745582 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:58:46.790254 2131642 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:58:46.790329 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:58:46.828759 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:58:46.861309 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:58:46.901037 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:58:46.935880 2131642 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:58:46.962070 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:58:47.014022 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:58:47.074118 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:58:47.139336 2131642 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:58:47.185204 2131642 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:58:47.228383 2131642 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:58:47.532490 2131642 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:58:48.584536 2131642 ssh_runner.go:235] Completed: sudo systemctl restart containerd: (1.052014138s)
	I0916 10:58:48.584565 2131642 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:58:48.584620 2131642 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:58:48.599116 2131642 start.go:563] Will wait 60s for crictl version
	I0916 10:58:48.599196 2131642 ssh_runner.go:195] Run: which crictl
	I0916 10:58:48.615231 2131642 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:58:48.763032 2131642 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:58:48.763111 2131642 ssh_runner.go:195] Run: containerd --version
	I0916 10:58:48.856558 2131642 ssh_runner.go:195] Run: containerd --version
	I0916 10:58:48.958183 2131642 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:58:48.960804 2131642 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:58:48.963462 2131642 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:58:48.984654 2131642 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:58:49.001024 2131642 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:58:49.036872 2131642 mustload.go:65] Loading cluster: ha-234759
	I0916 10:58:49.037123 2131642 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:58:49.037414 2131642 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:58:49.065765 2131642 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:58:49.066052 2131642 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759 for IP: 192.168.49.3
	I0916 10:58:49.066074 2131642 certs.go:194] generating shared ca certs ...
	I0916 10:58:49.066090 2131642 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:58:49.066204 2131642 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:58:49.066251 2131642 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:58:49.066263 2131642 certs.go:256] generating profile certs ...
	I0916 10:58:49.066341 2131642 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key
	I0916 10:58:49.066415 2131642 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key.991f748e
	I0916 10:58:49.066461 2131642 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key
	I0916 10:58:49.066475 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:58:49.066488 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:58:49.066505 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:58:49.066522 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:58:49.066536 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 10:58:49.066552 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 10:58:49.066569 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 10:58:49.066579 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 10:58:49.066632 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:58:49.066667 2131642 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:58:49.066710 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:58:49.066741 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:58:49.066771 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:58:49.066796 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:58:49.066841 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:58:49.066884 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:58:49.066902 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:58:49.066921 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:58:49.066983 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:58:49.114134 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40642 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:58:49.222958 2131642 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0916 10:58:49.227247 2131642 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0916 10:58:49.253127 2131642 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0916 10:58:49.264193 2131642 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1679 bytes)
	I0916 10:58:49.292035 2131642 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0916 10:58:49.304492 2131642 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0916 10:58:49.355920 2131642 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0916 10:58:49.363202 2131642 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1679 bytes)
	I0916 10:58:49.406997 2131642 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0916 10:58:49.421950 2131642 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0916 10:58:49.449067 2131642 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0916 10:58:49.462594 2131642 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1675 bytes)
	I0916 10:58:49.512575 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:58:49.652493 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:58:49.763289 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:58:49.851186 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:58:49.943907 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1432 bytes)
	I0916 10:58:50.035131 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0916 10:58:50.168901 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 10:58:50.276880 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 10:58:50.371566 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:58:50.499177 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:58:50.574233 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:58:50.751076 2131642 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0916 10:58:50.856876 2131642 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1679 bytes)
	I0916 10:58:50.966293 2131642 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0916 10:58:51.043750 2131642 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1679 bytes)
	I0916 10:58:51.148201 2131642 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0916 10:58:51.235148 2131642 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1675 bytes)
	I0916 10:58:51.308666 2131642 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0916 10:58:51.385046 2131642 ssh_runner.go:195] Run: openssl version
	I0916 10:58:51.407942 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:58:51.443487 2131642 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:58:51.460595 2131642 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:58:51.460679 2131642 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:58:51.469749 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:58:51.493983 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:58:51.549433 2131642 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:58:51.563680 2131642 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:58:51.563781 2131642 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:58:51.583311 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:58:51.611491 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:58:51.638872 2131642 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:58:51.646488 2131642 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:58:51.646576 2131642 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:58:51.662428 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:58:51.676662 2131642 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:58:51.687710 2131642 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 10:58:51.699771 2131642 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 10:58:51.711808 2131642 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 10:58:51.723595 2131642 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 10:58:51.732747 2131642 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 10:58:51.745868 2131642 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 10:58:51.754926 2131642 kubeadm.go:934] updating node {m02 192.168.49.3 8443 v1.31.1 containerd true true} ...
	I0916 10:58:51.755049 2131642 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-234759-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:58:51.755080 2131642 kube-vip.go:115] generating kube-vip config ...
	I0916 10:58:51.755142 2131642 ssh_runner.go:195] Run: sudo sh -c "lsmod | grep ip_vs"
	I0916 10:58:51.775133 2131642 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0916 10:58:51.775205 2131642 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 192.168.49.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.0
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
	I0916 10:58:51.775276 2131642 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:58:51.791500 2131642 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:58:51.791582 2131642 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0916 10:58:51.801963 2131642 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:58:51.825901 2131642 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:58:51.848662 2131642 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0916 10:58:51.873366 2131642 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:58:51.877346 2131642 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:58:51.901023 2131642 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:58:52.054426 2131642 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:58:52.074528 2131642 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.49.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 10:58:52.074858 2131642 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:58:52.078910 2131642 out.go:177] * Verifying Kubernetes components...
	I0916 10:58:52.081285 2131642 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:58:52.246718 2131642 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:58:52.266336 2131642 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:58:52.266631 2131642 kapi.go:59] client config for ha-234759: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]strin
g(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:58:52.266758 2131642 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:58:52.267003 2131642 node_ready.go:35] waiting up to 6m0s for node "ha-234759-m02" to be "Ready" ...
	I0916 10:58:52.267127 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:58:52.267139 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:52.267149 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:52.267153 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.401733 2131642 round_trippers.go:574] Response Status: 200 OK in 1134 milliseconds
	I0916 10:58:53.403143 2131642 node_ready.go:49] node "ha-234759-m02" has status "Ready":"True"
	I0916 10:58:53.403169 2131642 node_ready.go:38] duration metric: took 1.13614549s for node "ha-234759-m02" to be "Ready" ...
	I0916 10:58:53.403180 2131642 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:58:53.403264 2131642 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 10:58:53.403279 2131642 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 10:58:53.403348 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:58:53.403366 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:53.403375 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.403381 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.561740 2131642 round_trippers.go:574] Response Status: 200 OK in 158 milliseconds
	I0916 10:58:53.577089 2131642 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:53.579828 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:58:53.579871 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:53.579893 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.579916 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.593289 2131642 round_trippers.go:574] Response Status: 200 OK in 13 milliseconds
	I0916 10:58:53.594136 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:58:53.594180 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:53.594212 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.594230 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.603487 2131642 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:58:53.604109 2131642 pod_ready.go:93] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:53.604153 2131642 pod_ready.go:82] duration metric: took 24.432949ms for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:53.604191 2131642 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:53.604304 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vqj8q
	I0916 10:58:53.604338 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:53.604359 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.604380 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.613357 2131642 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:58:53.620147 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:58:53.620202 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:53.620239 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.620261 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.625903 2131642 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:58:53.627932 2131642 pod_ready.go:93] pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:53.628002 2131642 pod_ready.go:82] duration metric: took 23.786018ms for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:53.628029 2131642 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:53.628131 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759
	I0916 10:58:53.628166 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:53.628188 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.628206 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.633849 2131642 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:58:53.635526 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:58:53.635576 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:53.635616 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.635638 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.645436 2131642 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:58:53.646522 2131642 pod_ready.go:93] pod "etcd-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:53.646588 2131642 pod_ready.go:82] duration metric: took 18.538522ms for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:53.646616 2131642 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:53.646727 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:58:53.646766 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:53.646792 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.646824 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.653767 2131642 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:58:53.655078 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:58:53.655126 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:53.655158 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.655176 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.658835 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:53.659782 2131642 pod_ready.go:93] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:53.659827 2131642 pod_ready.go:82] duration metric: took 13.191769ms for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:53.659871 2131642 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:53.659971 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:58:53.659995 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:53.660028 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.660049 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.663137 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:53.664371 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:58:53.664414 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:53.664447 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.664464 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.671047 2131642 round_trippers.go:574] Response Status: 404 Not Found in 6 milliseconds
	I0916 10:58:53.671370 2131642 pod_ready.go:98] node "ha-234759-m03" hosting pod "etcd-ha-234759-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-234759-m03": nodes "ha-234759-m03" not found
	I0916 10:58:53.671431 2131642 pod_ready.go:82] duration metric: took 11.513687ms for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	E0916 10:58:53.671456 2131642 pod_ready.go:67] WaitExtra: waitPodCondition: node "ha-234759-m03" hosting pod "etcd-ha-234759-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-234759-m03": nodes "ha-234759-m03" not found
	I0916 10:58:53.671501 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:53.803811 2131642 request.go:632] Waited for 132.226229ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759
	I0916 10:58:53.803936 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759
	I0916 10:58:53.803980 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:53.804007 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:53.804025 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:53.807489 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:54.004194 2131642 request.go:632] Waited for 195.315942ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:58:54.004390 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:58:54.004418 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:54.004450 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.004466 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.016114 2131642 round_trippers.go:574] Response Status: 200 OK in 11 milliseconds
	I0916 10:58:54.017185 2131642 pod_ready.go:93] pod "kube-apiserver-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:54.017212 2131642 pod_ready.go:82] duration metric: took 345.683657ms for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:54.017229 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:54.203445 2131642 request.go:632] Waited for 186.132436ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:58:54.203506 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:58:54.203514 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:54.203525 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.203531 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.222412 2131642 round_trippers.go:574] Response Status: 200 OK in 18 milliseconds
	I0916 10:58:54.403775 2131642 request.go:632] Waited for 178.259891ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:58:54.403850 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:58:54.403863 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:54.403871 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.403876 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.412212 2131642 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:58:54.413168 2131642 pod_ready.go:93] pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:54.413193 2131642 pod_ready.go:82] duration metric: took 395.950689ms for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:54.413205 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:54.603719 2131642 request.go:632] Waited for 190.437445ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m03
	I0916 10:58:54.603792 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m03
	I0916 10:58:54.603807 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:54.603819 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.603824 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.611945 2131642 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 10:58:54.803916 2131642 request.go:632] Waited for 189.180014ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:58:54.803973 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:58:54.803984 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:54.803996 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:54.804003 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:54.814975 2131642 round_trippers.go:574] Response Status: 404 Not Found in 10 milliseconds
	I0916 10:58:54.815518 2131642 pod_ready.go:98] node "ha-234759-m03" hosting pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-234759-m03": nodes "ha-234759-m03" not found
	I0916 10:58:54.815543 2131642 pod_ready.go:82] duration metric: took 402.3266ms for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	E0916 10:58:54.815554 2131642 pod_ready.go:67] WaitExtra: waitPodCondition: node "ha-234759-m03" hosting pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-234759-m03": nodes "ha-234759-m03" not found
	I0916 10:58:54.815563 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:55.006209 2131642 request.go:632] Waited for 190.446233ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:58:55.006291 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:58:55.006298 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:55.006307 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:55.006311 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:55.011063 2131642 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:58:55.203853 2131642 request.go:632] Waited for 191.366417ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:58:55.203923 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:58:55.203931 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:55.203940 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:55.203945 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:55.207252 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:55.207821 2131642 pod_ready.go:93] pod "kube-controller-manager-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:55.207840 2131642 pod_ready.go:82] duration metric: took 392.268019ms for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:55.207853 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:55.403849 2131642 request.go:632] Waited for 195.919031ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:58:55.403929 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:58:55.403940 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:55.403949 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:55.403957 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:55.408319 2131642 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:58:55.604020 2131642 request.go:632] Waited for 194.323723ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:58:55.604079 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:58:55.604085 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:55.604093 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:55.604098 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:55.607380 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:55.608004 2131642 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:55.608021 2131642 pod_ready.go:82] duration metric: took 400.160739ms for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:55.608047 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:55.803896 2131642 request.go:632] Waited for 195.779783ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:58:55.804008 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:58:55.804016 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:55.804024 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:55.804028 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:55.807449 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:56.003467 2131642 request.go:632] Waited for 195.277261ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:58:56.003544 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:58:56.003550 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:56.003560 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:56.003564 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:56.008268 2131642 round_trippers.go:574] Response Status: 404 Not Found in 4 milliseconds
	I0916 10:58:56.008442 2131642 pod_ready.go:98] node "ha-234759-m03" hosting pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-234759-m03": nodes "ha-234759-m03" not found
	I0916 10:58:56.008461 2131642 pod_ready.go:82] duration metric: took 400.399983ms for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	E0916 10:58:56.008487 2131642 pod_ready.go:67] WaitExtra: waitPodCondition: node "ha-234759-m03" hosting pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-234759-m03": nodes "ha-234759-m03" not found
	I0916 10:58:56.008503 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:56.203877 2131642 request.go:632] Waited for 195.29989ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:58:56.203998 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:58:56.204011 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:56.204022 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:56.204027 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:56.207187 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:56.403386 2131642 request.go:632] Waited for 195.299735ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:58:56.403499 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:58:56.403513 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:56.403522 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:56.403527 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:56.406614 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:56.407271 2131642 pod_ready.go:93] pod "kube-proxy-f4jm2" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:56.407293 2131642 pod_ready.go:82] duration metric: took 398.780419ms for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:56.407305 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:56.603689 2131642 request.go:632] Waited for 196.311702ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:58:56.603775 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:58:56.603786 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:56.603795 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:56.603802 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:56.607250 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:56.804340 2131642 request.go:632] Waited for 196.36744ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:58:56.804391 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:58:56.804397 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:56.804406 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:56.804409 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:56.807596 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:56.808255 2131642 pod_ready.go:93] pod "kube-proxy-gwdl4" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:56.808278 2131642 pod_ready.go:82] duration metric: took 400.964828ms for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:56.808289 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-m84xg" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:57.004531 2131642 request.go:632] Waited for 196.162788ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:58:57.004644 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:58:57.004651 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:57.004667 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:57.004673 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:57.014625 2131642 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 10:58:57.203910 2131642 request.go:632] Waited for 188.293ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:58:57.203975 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:58:57.203983 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:57.203992 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:57.203996 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:57.207068 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:57.207693 2131642 pod_ready.go:93] pod "kube-proxy-m84xg" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:57.207711 2131642 pod_ready.go:82] duration metric: took 399.414713ms for pod "kube-proxy-m84xg" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:57.207723 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:57.404130 2131642 request.go:632] Waited for 196.327709ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:58:57.404206 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:58:57.404212 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:57.404221 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:57.404230 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:57.407576 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:57.603468 2131642 request.go:632] Waited for 195.156366ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:58:57.603525 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:58:57.603530 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:57.603554 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:57.603578 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:57.606483 2131642 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 10:58:57.606622 2131642 pod_ready.go:98] node "ha-234759-m03" hosting pod "kube-proxy-qrdxc" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-234759-m03": nodes "ha-234759-m03" not found
	I0916 10:58:57.606636 2131642 pod_ready.go:82] duration metric: took 398.89862ms for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	E0916 10:58:57.606647 2131642 pod_ready.go:67] WaitExtra: waitPodCondition: node "ha-234759-m03" hosting pod "kube-proxy-qrdxc" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-234759-m03": nodes "ha-234759-m03" not found
	I0916 10:58:57.606654 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:57.803959 2131642 request.go:632] Waited for 197.18163ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:58:57.804019 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:58:57.804025 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:57.804034 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:57.804043 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:57.807108 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:58.003808 2131642 request.go:632] Waited for 196.049762ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:58:58.003893 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:58:58.003899 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:58.003908 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:58.003913 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:58.009283 2131642 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:58:58.009891 2131642 pod_ready.go:93] pod "kube-scheduler-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:58.009913 2131642 pod_ready.go:82] duration metric: took 403.245745ms for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:58.009925 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:58.203846 2131642 request.go:632] Waited for 193.837379ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:58:58.203956 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:58:58.203987 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:58.204003 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:58.204008 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:58.207113 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:58.403440 2131642 request.go:632] Waited for 195.266166ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:58:58.403509 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:58:58.403520 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:58.403529 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:58.403536 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:58.406611 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:58.407230 2131642 pod_ready.go:93] pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:58:58.407252 2131642 pod_ready.go:82] duration metric: took 397.319465ms for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:58.407264 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:58:58.604235 2131642 request.go:632] Waited for 196.872675ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:58:58.604356 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:58:58.604370 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:58.604379 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:58.604385 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:58.608009 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:58.803977 2131642 request.go:632] Waited for 195.334671ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:58:58.804055 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m03
	I0916 10:58:58.804067 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:58.804111 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:58.804117 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:58.807344 2131642 round_trippers.go:574] Response Status: 404 Not Found in 3 milliseconds
	I0916 10:58:58.807485 2131642 pod_ready.go:98] node "ha-234759-m03" hosting pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-234759-m03": nodes "ha-234759-m03" not found
	I0916 10:58:58.807501 2131642 pod_ready.go:82] duration metric: took 400.214527ms for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	E0916 10:58:58.807513 2131642 pod_ready.go:67] WaitExtra: waitPodCondition: node "ha-234759-m03" hosting pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "ha-234759-m03": nodes "ha-234759-m03" not found
	I0916 10:58:58.807535 2131642 pod_ready.go:39] duration metric: took 5.404337742s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:58:58.807562 2131642 api_server.go:52] waiting for apiserver process to appear ...
	I0916 10:58:58.807674 2131642 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:58:58.818420 2131642 api_server.go:72] duration metric: took 6.743820691s to wait for apiserver process to appear ...
	I0916 10:58:58.818444 2131642 api_server.go:88] waiting for apiserver healthz status ...
	I0916 10:58:58.818493 2131642 api_server.go:253] Checking apiserver healthz at https://192.168.49.2:8443/healthz ...
	I0916 10:58:58.827144 2131642 api_server.go:279] https://192.168.49.2:8443/healthz returned 200:
	ok
	I0916 10:58:58.827215 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/version
	I0916 10:58:58.827227 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:58.827237 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:58.827246 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:58.828194 2131642 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 10:58:58.828369 2131642 api_server.go:141] control plane version: v1.31.1
	I0916 10:58:58.828392 2131642 api_server.go:131] duration metric: took 9.940836ms to wait for apiserver health ...
	I0916 10:58:58.828400 2131642 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 10:58:59.003560 2131642 request.go:632] Waited for 175.068524ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:58:59.003649 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:58:59.003656 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:59.003665 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:59.003670 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:59.011210 2131642 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:58:59.025174 2131642 system_pods.go:59] 26 kube-system pods found
	I0916 10:58:59.025265 2131642 system_pods.go:61] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:58:59.025288 2131642 system_pods.go:61] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:58:59.025310 2131642 system_pods.go:61] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:58:59.025343 2131642 system_pods.go:61] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:58:59.025375 2131642 system_pods.go:61] "etcd-ha-234759-m03" [701fa3e0-9014-4f92-9734-6b708cb56aa0] Running
	I0916 10:58:59.025394 2131642 system_pods.go:61] "kindnet-jhkc5" [59ea0a34-9a2b-44f2-901f-2bcc13b91a1b] Running
	I0916 10:58:59.025414 2131642 system_pods.go:61] "kindnet-lwtj4" [581ce31f-8039-42fe-a2a8-d64ec858cd32] Running
	I0916 10:58:59.025433 2131642 system_pods.go:61] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:58:59.025461 2131642 system_pods.go:61] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running
	I0916 10:58:59.025496 2131642 system_pods.go:61] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:58:59.025523 2131642 system_pods.go:61] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:58:59.025573 2131642 system_pods.go:61] "kube-apiserver-ha-234759-m03" [136cdbc8-ac04-4ca9-85a2-17ff91df20c0] Running
	I0916 10:58:59.025604 2131642 system_pods.go:61] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:58:59.025623 2131642 system_pods.go:61] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running
	I0916 10:58:59.025641 2131642 system_pods.go:61] "kube-controller-manager-ha-234759-m03" [61a9f46c-7889-49f1-afe7-1bbce98f0b5a] Running
	I0916 10:58:59.025664 2131642 system_pods.go:61] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running
	I0916 10:58:59.025700 2131642 system_pods.go:61] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:58:59.025718 2131642 system_pods.go:61] "kube-proxy-m84xg" [6b84ade0-ae34-4c7e-b1ed-e9c83f4bf130] Running
	I0916 10:58:59.025735 2131642 system_pods.go:61] "kube-proxy-qrdxc" [d08c1deb-9b33-4ec1-b15e-6dce465c00d9] Running
	I0916 10:58:59.025757 2131642 system_pods.go:61] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:58:59.025836 2131642 system_pods.go:61] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:58:59.025859 2131642 system_pods.go:61] "kube-scheduler-ha-234759-m03" [deab7491-330f-496f-985f-3b1882eeeb60] Running
	I0916 10:58:59.025876 2131642 system_pods.go:61] "kube-vip-ha-234759" [7dcc7fb0-91ad-4d60-a7fd-3c49deeb0ac0] Running
	I0916 10:58:59.025908 2131642 system_pods.go:61] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:58:59.025934 2131642 system_pods.go:61] "kube-vip-ha-234759-m03" [44f3fbf3-8803-4d2f-884c-470cdc81e835] Running
	I0916 10:58:59.025953 2131642 system_pods.go:61] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:58:59.025972 2131642 system_pods.go:74] duration metric: took 197.565529ms to wait for pod list to return data ...
	I0916 10:58:59.026009 2131642 default_sa.go:34] waiting for default service account to be created ...
	I0916 10:58:59.203488 2131642 request.go:632] Waited for 177.353314ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:58:59.203552 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 10:58:59.203563 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:59.203571 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:59.203582 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:59.207699 2131642 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:58:59.207945 2131642 default_sa.go:45] found service account: "default"
	I0916 10:58:59.207966 2131642 default_sa.go:55] duration metric: took 181.934335ms for default service account to be created ...
	I0916 10:58:59.207976 2131642 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 10:58:59.404373 2131642 request.go:632] Waited for 196.328684ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:58:59.404434 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:58:59.404441 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:59.404450 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:59.404459 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:59.410132 2131642 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:58:59.419637 2131642 system_pods.go:86] 26 kube-system pods found
	I0916 10:58:59.419676 2131642 system_pods.go:89] "coredns-7c65d6cfc9-2l4br" [18d893a2-274a-413e-bf3d-0dd1e88a9984] Running
	I0916 10:58:59.419684 2131642 system_pods.go:89] "coredns-7c65d6cfc9-vqj8q" [2c14618b-0831-4e8b-be9e-ba0049031bdb] Running
	I0916 10:58:59.419689 2131642 system_pods.go:89] "etcd-ha-234759" [ddce4dd8-50d9-4e3c-b889-8a45240980eb] Running
	I0916 10:58:59.419694 2131642 system_pods.go:89] "etcd-ha-234759-m02" [392fc64b-179e-446c-8a9d-3d150fee078a] Running
	I0916 10:58:59.419699 2131642 system_pods.go:89] "etcd-ha-234759-m03" [701fa3e0-9014-4f92-9734-6b708cb56aa0] Running
	I0916 10:58:59.419731 2131642 system_pods.go:89] "kindnet-jhkc5" [59ea0a34-9a2b-44f2-901f-2bcc13b91a1b] Running
	I0916 10:58:59.419737 2131642 system_pods.go:89] "kindnet-lwtj4" [581ce31f-8039-42fe-a2a8-d64ec858cd32] Running
	I0916 10:58:59.419751 2131642 system_pods.go:89] "kindnet-q8nl6" [ee79653f-8a9e-41e2-82bb-7b08fc87e265] Running
	I0916 10:58:59.419755 2131642 system_pods.go:89] "kindnet-svsnq" [fdf851b1-7b61-4611-ae5f-ad108e042f66] Running
	I0916 10:58:59.419760 2131642 system_pods.go:89] "kube-apiserver-ha-234759" [3738ecfc-7507-4e7e-8132-958e0492bee2] Running
	I0916 10:58:59.419764 2131642 system_pods.go:89] "kube-apiserver-ha-234759-m02" [4989cbcb-47b9-49ee-bdf5-0b4a63becd9e] Running
	I0916 10:58:59.419772 2131642 system_pods.go:89] "kube-apiserver-ha-234759-m03" [136cdbc8-ac04-4ca9-85a2-17ff91df20c0] Running
	I0916 10:58:59.419777 2131642 system_pods.go:89] "kube-controller-manager-ha-234759" [e2356638-39ef-45cd-a379-e5cfd9d11ee7] Running
	I0916 10:58:59.419781 2131642 system_pods.go:89] "kube-controller-manager-ha-234759-m02" [0d7ac556-6b6b-4801-a760-de4f92ca72d8] Running
	I0916 10:58:59.419792 2131642 system_pods.go:89] "kube-controller-manager-ha-234759-m03" [61a9f46c-7889-49f1-afe7-1bbce98f0b5a] Running
	I0916 10:58:59.419798 2131642 system_pods.go:89] "kube-proxy-f4jm2" [4fece504-98b0-4ae1-95ba-2b2356dab7fe] Running
	I0916 10:58:59.419802 2131642 system_pods.go:89] "kube-proxy-gwdl4" [8ea118a7-cc54-4dd9-8bb2-cfc133a376fc] Running
	I0916 10:58:59.419805 2131642 system_pods.go:89] "kube-proxy-m84xg" [6b84ade0-ae34-4c7e-b1ed-e9c83f4bf130] Running
	I0916 10:58:59.419809 2131642 system_pods.go:89] "kube-proxy-qrdxc" [d08c1deb-9b33-4ec1-b15e-6dce465c00d9] Running
	I0916 10:58:59.419813 2131642 system_pods.go:89] "kube-scheduler-ha-234759" [1b103573-3266-4751-8325-3dfa443638e1] Running
	I0916 10:58:59.419821 2131642 system_pods.go:89] "kube-scheduler-ha-234759-m02" [0f052b12-74a0-4555-9172-5ba35846b726] Running
	I0916 10:58:59.419825 2131642 system_pods.go:89] "kube-scheduler-ha-234759-m03" [deab7491-330f-496f-985f-3b1882eeeb60] Running
	I0916 10:58:59.419831 2131642 system_pods.go:89] "kube-vip-ha-234759" [7dcc7fb0-91ad-4d60-a7fd-3c49deeb0ac0] Running
	I0916 10:58:59.419836 2131642 system_pods.go:89] "kube-vip-ha-234759-m02" [a04bdbf5-623a-4e81-9cd8-60ed45e4be07] Running
	I0916 10:58:59.419842 2131642 system_pods.go:89] "kube-vip-ha-234759-m03" [44f3fbf3-8803-4d2f-884c-470cdc81e835] Running
	I0916 10:58:59.419846 2131642 system_pods.go:89] "storage-provisioner" [e8924914-9ba5-4adc-ac46-9d3d97b0bc08] Running
	I0916 10:58:59.419855 2131642 system_pods.go:126] duration metric: took 211.868908ms to wait for k8s-apps to be running ...
	I0916 10:58:59.419868 2131642 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:58:59.419930 2131642 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:58:59.431865 2131642 system_svc.go:56] duration metric: took 11.989804ms WaitForService to wait for kubelet
	I0916 10:58:59.431909 2131642 kubeadm.go:582] duration metric: took 7.357311348s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:58:59.431952 2131642 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:58:59.604394 2131642 request.go:632] Waited for 172.354901ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:58:59.604454 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:58:59.604459 2131642 round_trippers.go:469] Request Headers:
	I0916 10:58:59.604468 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:58:59.604476 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:58:59.608064 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:58:59.609317 2131642 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:58:59.609349 2131642 node_conditions.go:123] node cpu capacity is 2
	I0916 10:58:59.609360 2131642 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:58:59.609366 2131642 node_conditions.go:123] node cpu capacity is 2
	I0916 10:58:59.609370 2131642 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:58:59.609375 2131642 node_conditions.go:123] node cpu capacity is 2
	I0916 10:58:59.609380 2131642 node_conditions.go:105] duration metric: took 177.422098ms to run NodePressure ...
	I0916 10:58:59.609393 2131642 start.go:241] waiting for startup goroutines ...
	I0916 10:58:59.609416 2131642 start.go:255] writing updated cluster config ...
	I0916 10:58:59.612682 2131642 out.go:201] 
	I0916 10:58:59.615634 2131642 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:58:59.615751 2131642 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:58:59.619014 2131642 out.go:177] * Starting "ha-234759-m04" worker node in "ha-234759" cluster
	I0916 10:58:59.622510 2131642 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:58:59.625270 2131642 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:58:59.627783 2131642 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 10:58:59.627815 2131642 cache.go:56] Caching tarball of preloaded images
	I0916 10:58:59.627894 2131642 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:58:59.627942 2131642 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 10:58:59.627960 2131642 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 10:58:59.628092 2131642 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	W0916 10:58:59.649258 2131642 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 10:58:59.649277 2131642 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:58:59.649364 2131642 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:58:59.649385 2131642 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 10:58:59.649397 2131642 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 10:58:59.649405 2131642 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 10:58:59.649411 2131642 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 10:58:59.650786 2131642 image.go:273] response: 
	I0916 10:58:59.772090 2131642 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 10:58:59.772130 2131642 cache.go:194] Successfully downloaded all kic artifacts
	I0916 10:58:59.772161 2131642 start.go:360] acquireMachinesLock for ha-234759-m04: {Name:mk1736d6dcf4c8e0380a733a9a6df0f9a97ba403 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 10:58:59.772225 2131642 start.go:364] duration metric: took 42.83µs to acquireMachinesLock for "ha-234759-m04"
	I0916 10:58:59.772252 2131642 start.go:96] Skipping create...Using existing machine configuration
	I0916 10:58:59.772257 2131642 fix.go:54] fixHost starting: m04
	I0916 10:58:59.772528 2131642 cli_runner.go:164] Run: docker container inspect ha-234759-m04 --format={{.State.Status}}
	I0916 10:58:59.789723 2131642 fix.go:112] recreateIfNeeded on ha-234759-m04: state=Stopped err=<nil>
	W0916 10:58:59.789749 2131642 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 10:58:59.793003 2131642 out.go:177] * Restarting existing docker container for "ha-234759-m04" ...
	I0916 10:58:59.795797 2131642 cli_runner.go:164] Run: docker start ha-234759-m04
	I0916 10:59:00.329257 2131642 cli_runner.go:164] Run: docker container inspect ha-234759-m04 --format={{.State.Status}}
	I0916 10:59:00.380050 2131642 kic.go:430] container "ha-234759-m04" state is running.
	I0916 10:59:00.380499 2131642 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m04
	I0916 10:59:00.420983 2131642 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/config.json ...
	I0916 10:59:00.423526 2131642 machine.go:93] provisionDockerMachine start ...
	I0916 10:59:00.423613 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:59:00.458924 2131642 main.go:141] libmachine: Using SSH client type: native
	I0916 10:59:00.459225 2131642 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40652 <nil> <nil>}
	I0916 10:59:00.459242 2131642 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 10:59:00.463313 2131642 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:48158->127.0.0.1:40652: read: connection reset by peer
	I0916 10:59:03.628280 2131642 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m04
	
	I0916 10:59:03.628308 2131642 ubuntu.go:169] provisioning hostname "ha-234759-m04"
	I0916 10:59:03.628403 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:59:03.651921 2131642 main.go:141] libmachine: Using SSH client type: native
	I0916 10:59:03.652163 2131642 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40652 <nil> <nil>}
	I0916 10:59:03.652174 2131642 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-234759-m04 && echo "ha-234759-m04" | sudo tee /etc/hostname
	I0916 10:59:03.898976 2131642 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-234759-m04
	
	I0916 10:59:03.899146 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:59:03.931340 2131642 main.go:141] libmachine: Using SSH client type: native
	I0916 10:59:03.931585 2131642 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40652 <nil> <nil>}
	I0916 10:59:03.931602 2131642 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-234759-m04' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-234759-m04/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-234759-m04' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 10:59:04.123071 2131642 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 10:59:04.123100 2131642 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 10:59:04.123119 2131642 ubuntu.go:177] setting up certificates
	I0916 10:59:04.123130 2131642 provision.go:84] configureAuth start
	I0916 10:59:04.123197 2131642 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m04
	I0916 10:59:04.150990 2131642 provision.go:143] copyHostCerts
	I0916 10:59:04.151029 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:59:04.151059 2131642 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 10:59:04.151067 2131642 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 10:59:04.151137 2131642 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 10:59:04.151211 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:59:04.151228 2131642 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 10:59:04.151232 2131642 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 10:59:04.151259 2131642 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 10:59:04.151299 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:59:04.151315 2131642 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 10:59:04.151320 2131642 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 10:59:04.151345 2131642 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 10:59:04.151392 2131642 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.ha-234759-m04 san=[127.0.0.1 192.168.49.5 ha-234759-m04 localhost minikube]
	I0916 10:59:04.603377 2131642 provision.go:177] copyRemoteCerts
	I0916 10:59:04.603495 2131642 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 10:59:04.603575 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:59:04.627302 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40652 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m04/id_rsa Username:docker}
	I0916 10:59:04.760961 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 10:59:04.761045 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1208 bytes)
	I0916 10:59:04.806279 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 10:59:04.806339 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 10:59:04.857883 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 10:59:04.857940 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 10:59:04.924419 2131642 provision.go:87] duration metric: took 801.274061ms to configureAuth
	I0916 10:59:04.924497 2131642 ubuntu.go:193] setting minikube options for container-runtime
	I0916 10:59:04.924827 2131642 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:59:04.924863 2131642 machine.go:96] duration metric: took 4.501317404s to provisionDockerMachine
	I0916 10:59:04.924893 2131642 start.go:293] postStartSetup for "ha-234759-m04" (driver="docker")
	I0916 10:59:04.924918 2131642 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 10:59:04.925004 2131642 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 10:59:04.925075 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:59:04.953887 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40652 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m04/id_rsa Username:docker}
	I0916 10:59:05.068968 2131642 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 10:59:05.073892 2131642 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 10:59:05.073932 2131642 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 10:59:05.073943 2131642 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 10:59:05.073951 2131642 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 10:59:05.073961 2131642 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 10:59:05.074025 2131642 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 10:59:05.074284 2131642 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 10:59:05.074295 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 10:59:05.074408 2131642 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 10:59:05.093408 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:59:05.152541 2131642 start.go:296] duration metric: took 227.61866ms for postStartSetup
	I0916 10:59:05.152704 2131642 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:59:05.152788 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:59:05.177239 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40652 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m04/id_rsa Username:docker}
	I0916 10:59:05.290569 2131642 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 10:59:05.298505 2131642 fix.go:56] duration metric: took 5.526238573s for fixHost
	I0916 10:59:05.298533 2131642 start.go:83] releasing machines lock for "ha-234759-m04", held for 5.526293211s
	I0916 10:59:05.298606 2131642 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m04
	I0916 10:59:05.331149 2131642 out.go:177] * Found network options:
	I0916 10:59:05.333950 2131642 out.go:177]   - NO_PROXY=192.168.49.2,192.168.49.3
	W0916 10:59:05.336554 2131642 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:59:05.336588 2131642 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:59:05.336612 2131642 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 10:59:05.336623 2131642 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 10:59:05.336695 2131642 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 10:59:05.336740 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:59:05.336771 2131642 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 10:59:05.336827 2131642 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:59:05.369273 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40652 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m04/id_rsa Username:docker}
	I0916 10:59:05.374568 2131642 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40652 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m04/id_rsa Username:docker}
	I0916 10:59:05.502806 2131642 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 10:59:05.685737 2131642 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 10:59:05.685855 2131642 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 10:59:05.701154 2131642 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 10:59:05.701181 2131642 start.go:495] detecting cgroup driver to use...
	I0916 10:59:05.701236 2131642 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 10:59:05.701317 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 10:59:05.729327 2131642 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 10:59:05.749082 2131642 docker.go:217] disabling cri-docker service (if available) ...
	I0916 10:59:05.749177 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 10:59:05.776224 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 10:59:05.797540 2131642 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 10:59:06.017713 2131642 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 10:59:06.173991 2131642 docker.go:233] disabling docker service ...
	I0916 10:59:06.174120 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 10:59:06.197074 2131642 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 10:59:06.226263 2131642 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 10:59:06.374331 2131642 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 10:59:06.552248 2131642 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 10:59:06.564566 2131642 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 10:59:06.585153 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 10:59:06.597634 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 10:59:06.614475 2131642 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 10:59:06.614594 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 10:59:06.629418 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:59:06.642547 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 10:59:06.656461 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 10:59:06.667979 2131642 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 10:59:06.678231 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 10:59:06.688713 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 10:59:06.700099 2131642 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 10:59:06.711750 2131642 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 10:59:06.720774 2131642 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 10:59:06.729937 2131642 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:59:06.852068 2131642 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 10:59:07.117997 2131642 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 10:59:07.118068 2131642 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 10:59:07.121766 2131642 start.go:563] Will wait 60s for crictl version
	I0916 10:59:07.121832 2131642 ssh_runner.go:195] Run: which crictl
	I0916 10:59:07.125497 2131642 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 10:59:07.182872 2131642 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 10:59:07.183000 2131642 ssh_runner.go:195] Run: containerd --version
	I0916 10:59:07.207761 2131642 ssh_runner.go:195] Run: containerd --version
	I0916 10:59:07.239341 2131642 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 10:59:07.241927 2131642 out.go:177]   - env NO_PROXY=192.168.49.2
	I0916 10:59:07.244464 2131642 out.go:177]   - env NO_PROXY=192.168.49.2,192.168.49.3
	I0916 10:59:07.247217 2131642 cli_runner.go:164] Run: docker network inspect ha-234759 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 10:59:07.261524 2131642 ssh_runner.go:195] Run: grep 192.168.49.1	host.minikube.internal$ /etc/hosts
	I0916 10:59:07.265667 2131642 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.49.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:59:07.280794 2131642 mustload.go:65] Loading cluster: ha-234759
	I0916 10:59:07.281035 2131642 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:59:07.281307 2131642 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:59:07.299634 2131642 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:59:07.299911 2131642 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759 for IP: 192.168.49.5
	I0916 10:59:07.299925 2131642 certs.go:194] generating shared ca certs ...
	I0916 10:59:07.299940 2131642 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 10:59:07.300058 2131642 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 10:59:07.300105 2131642 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 10:59:07.300120 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 10:59:07.300136 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 10:59:07.300146 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 10:59:07.300157 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 10:59:07.300215 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 10:59:07.300251 2131642 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 10:59:07.300263 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 10:59:07.300287 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 10:59:07.300323 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 10:59:07.300352 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 10:59:07.300399 2131642 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 10:59:07.300432 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 10:59:07.300448 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:59:07.300462 2131642 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 10:59:07.300482 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 10:59:07.330559 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 10:59:07.357401 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 10:59:07.384036 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 10:59:07.409138 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 10:59:07.436676 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 10:59:07.463947 2131642 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 10:59:07.489688 2131642 ssh_runner.go:195] Run: openssl version
	I0916 10:59:07.496496 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 10:59:07.506757 2131642 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 10:59:07.511708 2131642 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 10:59:07.511781 2131642 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 10:59:07.519097 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 10:59:07.529137 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 10:59:07.539436 2131642 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 10:59:07.543373 2131642 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 10:59:07.543441 2131642 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 10:59:07.552449 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 10:59:07.563349 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 10:59:07.573005 2131642 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:59:07.576814 2131642 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:59:07.576903 2131642 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 10:59:07.584471 2131642 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 10:59:07.594448 2131642 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 10:59:07.598289 2131642 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 10:59:07.598378 2131642 kubeadm.go:934] updating node {m04 192.168.49.5 0 v1.31.1  false true} ...
	I0916 10:59:07.598485 2131642 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-234759-m04 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.49.5
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:ha-234759 Namespace:default APIServerHAVIP:192.168.49.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 10:59:07.598563 2131642 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 10:59:07.607589 2131642 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 10:59:07.607737 2131642 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 10:59:07.617033 2131642 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (317 bytes)
	I0916 10:59:07.644317 2131642 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 10:59:07.664406 2131642 ssh_runner.go:195] Run: grep 192.168.49.254	control-plane.minikube.internal$ /etc/hosts
	I0916 10:59:07.668079 2131642 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.49.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 10:59:07.680442 2131642 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:59:07.767463 2131642 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:59:07.780096 2131642 start.go:235] Will wait 6m0s for node &{Name:m04 IP:192.168.49.5 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime: ControlPlane:false Worker:true}
	I0916 10:59:07.780549 2131642 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:59:07.784004 2131642 out.go:177] * Verifying Kubernetes components...
	I0916 10:59:07.786304 2131642 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 10:59:07.882523 2131642 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 10:59:07.898260 2131642 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:59:07.898639 2131642 kapi.go:59] client config for ha-234759: &rest.Config{Host:"https://192.168.49.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/ha-234759/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]strin
g(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0916 10:59:07.898798 2131642 kubeadm.go:483] Overriding stale ClientConfig host https://192.168.49.254:8443 with https://192.168.49.2:8443
	I0916 10:59:07.899027 2131642 node_ready.go:35] waiting up to 6m0s for node "ha-234759-m04" to be "Ready" ...
	I0916 10:59:07.899108 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:59:07.899119 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:07.899128 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:07.899141 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:07.902202 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:07.903004 2131642 node_ready.go:49] node "ha-234759-m04" has status "Ready":"True"
	I0916 10:59:07.903030 2131642 node_ready.go:38] duration metric: took 3.980711ms for node "ha-234759-m04" to be "Ready" ...
	I0916 10:59:07.903041 2131642 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:59:07.903109 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods
	I0916 10:59:07.903121 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:07.903129 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:07.903133 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:07.908900 2131642 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:59:07.921685 2131642 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:07.924182 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:07.924207 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:07.924216 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:07.924220 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:07.935031 2131642 round_trippers.go:574] Response Status: 200 OK in 10 milliseconds
	I0916 10:59:07.936069 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:07.936094 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:07.936105 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:07.936112 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:07.941986 2131642 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:59:08.422646 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:08.422670 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:08.422714 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:08.422724 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:08.425969 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:08.426986 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:08.427009 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:08.427019 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:08.427023 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:08.429769 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:08.921933 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:08.921963 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:08.921973 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:08.921978 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:08.924910 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:08.925951 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:08.926003 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:08.926027 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:08.926045 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:08.933723 2131642 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:59:09.422391 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:09.422414 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:09.422423 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:09.422429 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:09.425608 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:09.426436 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:09.426456 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:09.426467 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:09.426471 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:09.429245 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:09.921962 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:09.921984 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:09.921998 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:09.922004 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:09.925268 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:09.926039 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:09.926060 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:09.926069 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:09.926072 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:09.928894 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:09.929488 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:10.422721 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:10.422744 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:10.422754 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:10.422758 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:10.426045 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:10.426867 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:10.426886 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:10.426896 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:10.426901 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:10.429867 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:10.922128 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:10.922151 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:10.922169 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:10.922174 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:10.926285 2131642 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:10.927203 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:10.927227 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:10.927237 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:10.927244 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:10.930026 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:11.422906 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:11.422933 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:11.422943 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:11.422949 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:11.427178 2131642 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:11.428042 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:11.428064 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:11.428073 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:11.428076 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:11.431324 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:11.922554 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:11.922579 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:11.922589 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:11.922595 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:11.926091 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:11.927228 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:11.927252 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:11.927262 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:11.927266 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:11.930421 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:11.931056 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:12.422083 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:12.422104 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:12.422113 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:12.422119 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:12.425209 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:12.425964 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:12.425982 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:12.425992 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:12.425996 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:12.428822 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:12.922029 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:12.922104 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:12.922121 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:12.922125 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:12.925046 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:12.926101 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:12.926129 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:12.926139 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:12.926143 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:12.929424 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:13.421980 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:13.422007 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:13.422016 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.422020 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.425221 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:13.426172 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:13.426190 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:13.426206 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.426210 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.429211 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:13.921960 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:13.921984 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:13.921994 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.922000 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.925118 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:13.925901 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:13.925920 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:13.925958 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:13.925969 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:13.928950 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:14.422222 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:14.422244 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:14.422254 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:14.422263 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:14.425809 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:14.426851 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:14.426870 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:14.426879 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:14.426884 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:14.429885 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:14.430550 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:14.922037 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:14.922060 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:14.922070 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:14.922074 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:14.925457 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:14.926245 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:14.926262 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:14.926272 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:14.926280 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:14.929151 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:15.422488 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:15.422511 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:15.422520 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:15.422525 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:15.425661 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:15.426791 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:15.426811 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:15.426821 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:15.426825 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:15.429615 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:15.922053 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:15.922077 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:15.922098 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:15.922103 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:15.925576 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:15.926496 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:15.926514 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:15.926523 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:15.926528 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:15.929407 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:16.422716 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:16.422737 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:16.422754 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:16.422758 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:16.425822 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:16.427005 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:16.427026 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:16.427036 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:16.427043 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:16.429719 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:16.921932 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:16.921955 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:16.921964 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:16.921969 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:16.926099 2131642 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:16.927585 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:16.927637 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:16.927674 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:16.927691 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:16.934767 2131642 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:59:16.935497 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:17.422231 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:17.422303 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:17.422327 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:17.422346 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:17.425886 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:17.426814 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:17.426880 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:17.426914 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:17.426937 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:17.430113 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:17.922353 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:17.922423 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:17.922446 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:17.922466 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:17.927200 2131642 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:17.928182 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:17.928202 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:17.928211 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:17.928215 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:17.931362 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:18.421995 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:18.422014 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:18.422023 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:18.422028 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:18.425677 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:18.426506 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:18.426526 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:18.426539 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:18.426573 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:18.429547 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:18.921971 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:18.921995 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:18.922004 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:18.922016 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:18.925211 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:18.925925 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:18.925943 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:18.925952 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:18.925956 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:18.928769 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:19.422554 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:19.422575 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:19.422585 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:19.422589 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:19.425620 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:19.426454 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:19.426474 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:19.426483 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:19.426488 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:19.429389 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:19.430112 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:19.922901 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:19.922923 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:19.922932 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:19.922935 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:19.926226 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:19.927160 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:19.927177 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:19.927186 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:19.927191 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:19.930294 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:20.422578 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:20.422600 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:20.422609 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:20.422614 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:20.425810 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:20.426643 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:20.426662 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:20.426726 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:20.426738 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:20.429350 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:20.922888 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:20.922913 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:20.922924 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:20.922928 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:20.926029 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:20.926882 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:20.926899 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:20.926908 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:20.926913 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:20.929722 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:21.422415 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:21.422437 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:21.422446 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:21.422455 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:21.425767 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:21.426529 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:21.426557 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:21.426567 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:21.426573 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:21.429255 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:21.922417 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:21.922441 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:21.922451 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:21.922456 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:21.925593 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:21.926393 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:21.926413 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:21.926422 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:21.926429 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:21.929285 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:21.930091 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:22.421975 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:22.421994 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:22.422005 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:22.422009 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:22.425115 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:22.426009 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:22.426044 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:22.426054 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:22.426060 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:22.428761 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:22.921922 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:22.921950 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:22.921961 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:22.921965 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:22.925658 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:22.926448 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:22.926470 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:22.926480 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:22.926483 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:22.929856 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:23.422389 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:23.422410 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:23.422420 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:23.422426 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:23.425884 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:23.426667 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:23.426729 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:23.426740 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:23.426745 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:23.429784 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:23.922034 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:23.922085 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:23.922095 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:23.922100 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:23.925350 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:23.926131 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:23.926151 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:23.926161 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:23.926165 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:23.929289 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:23.930279 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:24.421983 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:24.422026 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:24.422037 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:24.422041 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:24.425592 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:24.426544 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:24.426572 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:24.426582 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:24.426588 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:24.429691 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:24.922424 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:24.922450 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:24.922459 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:24.922463 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:24.925623 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:24.926380 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:24.926398 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:24.926407 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:24.926414 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:24.929637 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:25.422805 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:25.422832 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:25.422843 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:25.422848 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:25.430128 2131642 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:59:25.431350 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:25.431376 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:25.431386 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:25.431408 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:25.434247 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:25.922371 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:25.922395 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:25.922412 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:25.922416 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:25.925829 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:25.926661 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:25.926770 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:25.926786 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:25.926791 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:25.929404 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:26.422541 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:26.422564 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:26.422574 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:26.422578 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:26.426721 2131642 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:26.427825 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:26.427843 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:26.427852 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:26.427856 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:26.435492 2131642 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 10:59:26.435985 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:26.922448 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:26.922474 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:26.922484 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:26.922489 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:26.925710 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:26.926537 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:26.926559 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:26.926567 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:26.926572 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:26.929406 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:27.422805 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:27.422824 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:27.422834 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:27.422844 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:27.426214 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:27.427137 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:27.427156 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:27.427165 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:27.427171 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:27.429813 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:27.921972 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:27.921994 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:27.922004 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:27.922009 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:27.926102 2131642 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:27.926868 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:27.926932 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:27.926941 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:27.926945 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:27.930009 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:28.421959 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:28.421983 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:28.421992 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:28.421998 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:28.425220 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:28.426057 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:28.426076 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:28.426085 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:28.426090 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:28.429022 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:28.922416 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:28.922439 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:28.922449 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:28.922453 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:28.925739 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:28.926557 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:28.926577 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:28.926587 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:28.926593 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:28.929611 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:28.930366 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:29.421996 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:29.422047 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:29.422057 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:29.422061 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:29.425542 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:29.426328 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:29.426377 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:29.426393 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:29.426400 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:29.429351 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:29.921960 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:29.921984 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:29.921992 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:29.921997 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:29.925308 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:29.926186 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:29.926206 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:29.926215 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:29.926218 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:29.929085 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:30.421926 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:30.421947 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:30.421956 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:30.421960 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:30.425363 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:30.426204 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:30.426224 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:30.426235 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:30.426239 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:30.429316 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:30.922830 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:30.922851 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:30.922861 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:30.922866 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:30.929165 2131642 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 10:59:30.930061 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:30.930079 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:30.930088 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:30.930092 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:30.932789 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:30.933400 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:31.422415 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:31.422440 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:31.422450 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:31.422455 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:31.425642 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:31.426661 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:31.426719 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:31.426730 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:31.426736 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:31.429481 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:31.922592 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:31.922621 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:31.922632 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:31.922636 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:31.926147 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:31.926981 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:31.926998 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:31.927007 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:31.927011 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:31.930107 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:32.422593 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:32.422619 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:32.422631 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.422637 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.425973 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:32.426809 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:32.426860 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:32.426877 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.426882 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.429727 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:32.922853 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:32.922878 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:32.922887 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.922892 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.926422 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:32.927498 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:32.927522 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:32.927533 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:32.927540 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:32.930395 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:33.422066 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:33.422089 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:33.422098 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:33.422103 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:33.425314 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:33.426212 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:33.426233 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:33.426242 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:33.426247 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:33.428998 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:33.430035 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:33.922815 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:33.922838 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:33.922847 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:33.922851 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:33.925965 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:33.926762 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:33.926778 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:33.926787 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:33.926792 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:33.929591 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:34.422907 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:34.422936 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:34.422947 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:34.422952 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:34.426279 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:34.427233 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:34.427254 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:34.427264 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:34.427268 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:34.430365 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:34.922390 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:34.922409 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:34.922423 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:34.922429 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:34.935654 2131642 round_trippers.go:574] Response Status: 200 OK in 13 milliseconds
	I0916 10:59:34.936417 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:34.936430 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:34.936439 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:34.936445 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:34.942032 2131642 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 10:59:35.422820 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:35.422847 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:35.422857 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:35.422863 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:35.426121 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:35.427176 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:35.427197 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:35.427207 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:35.427213 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:35.430253 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:35.430903 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:35.922655 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:35.922775 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:35.922790 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:35.922798 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:35.925821 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:35.926881 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:35.926901 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:35.926910 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:35.926916 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:35.929622 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:36.422361 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:36.422401 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:36.422410 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:36.422414 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:36.425503 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:36.426338 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:36.426359 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:36.426369 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:36.426374 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:36.429100 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:36.921960 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:36.921984 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:36.921993 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:36.921998 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:36.925032 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:36.925712 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:36.925730 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:36.925740 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:36.925744 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:36.928394 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:37.422469 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:37.422495 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:37.422505 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:37.422509 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:37.425703 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:37.426504 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:37.426536 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:37.426545 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:37.426550 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:37.429432 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:37.922425 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:37.922450 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:37.922460 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:37.922466 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:37.927292 2131642 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:37.928017 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:37.928029 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:37.928038 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:37.928041 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:37.931115 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:37.931963 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:38.422444 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:38.422479 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:38.422490 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:38.422495 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:38.425788 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:38.426566 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:38.426585 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:38.426595 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:38.426601 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:38.429271 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:38.922368 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:38.922391 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:38.922401 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:38.922406 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:38.925692 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:38.926468 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:38.926486 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:38.926495 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:38.926499 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:38.929552 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:39.422035 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:39.422078 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:39.422089 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:39.422094 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:39.425201 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:39.426049 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:39.426073 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:39.426082 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:39.426086 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:39.428803 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:39.922602 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:39.922631 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:39.922641 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:39.922645 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:39.925947 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:39.926836 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:39.926860 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:39.926872 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:39.926877 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:39.930269 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:40.422818 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:40.422843 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:40.422853 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:40.422859 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:40.426025 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:40.426906 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:40.426926 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:40.426935 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:40.426940 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:40.429759 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:40.430363 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:40.921902 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:40.921929 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:40.921940 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:40.921945 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:40.925247 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:40.926180 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:40.926201 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:40.926210 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:40.926216 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:40.929348 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:41.422850 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:41.422872 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:41.422882 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:41.422888 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:41.427181 2131642 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:41.427943 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:41.427962 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:41.427971 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:41.427976 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:41.430780 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:41.921911 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:41.921944 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:41.921954 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:41.921959 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:41.925031 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:41.925707 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:41.925724 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:41.925733 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:41.925738 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:41.928510 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:42.422535 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:42.422559 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:42.422569 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:42.422574 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:42.425628 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:42.426457 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:42.426479 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:42.426489 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:42.426492 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:42.429215 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:42.922093 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:42.922120 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:42.922130 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:42.922135 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:42.925448 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:42.926350 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:42.926371 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:42.926382 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:42.926385 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:42.929141 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:42.929616 2131642 pod_ready.go:103] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"False"
	I0916 10:59:43.422308 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:43.422332 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:43.422341 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.422348 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.425587 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:43.426432 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:43.426452 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:43.426461 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.426465 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.429522 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:43.922877 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-2l4br
	I0916 10:59:43.922901 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:43.922911 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.922915 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.926218 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:43.927007 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:43.927020 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:43.927028 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.927032 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.930357 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:43.931025 2131642 pod_ready.go:93] pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:43.931049 2131642 pod_ready.go:82] duration metric: took 36.009323753s for pod "coredns-7c65d6cfc9-2l4br" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:43.931062 2131642 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:43.931135 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vqj8q
	I0916 10:59:43.931146 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:43.931155 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.931160 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.934208 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:43.935116 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:43.935136 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:43.935146 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.935152 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.938171 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:43.938998 2131642 pod_ready.go:93] pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:43.939016 2131642 pod_ready.go:82] duration metric: took 7.946996ms for pod "coredns-7c65d6cfc9-vqj8q" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:43.939047 2131642 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:43.939129 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759
	I0916 10:59:43.939139 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:43.939147 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.939151 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.942176 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:43.943149 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:43.943173 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:43.943182 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.943185 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.946051 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:43.946825 2131642 pod_ready.go:93] pod "etcd-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:43.946845 2131642 pod_ready.go:82] duration metric: took 7.785388ms for pod "etcd-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:43.946856 2131642 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:43.946921 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m02
	I0916 10:59:43.946933 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:43.946942 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.946947 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.949614 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:43.950501 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:59:43.950521 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:43.950530 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.950535 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.953521 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:43.954259 2131642 pod_ready.go:93] pod "etcd-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:43.954280 2131642 pod_ready.go:82] duration metric: took 7.4173ms for pod "etcd-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:43.954292 2131642 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:43.954394 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/etcd-ha-234759-m03
	I0916 10:59:43.954403 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:43.954412 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.954420 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.958745 2131642 round_trippers.go:574] Response Status: 404 Not Found in 4 milliseconds
	I0916 10:59:43.958881 2131642 pod_ready.go:98] error getting pod "etcd-ha-234759-m03" in "kube-system" namespace (skipping!): pods "etcd-ha-234759-m03" not found
	I0916 10:59:43.958907 2131642 pod_ready.go:82] duration metric: took 4.577902ms for pod "etcd-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	E0916 10:59:43.958922 2131642 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "etcd-ha-234759-m03" in "kube-system" namespace (skipping!): pods "etcd-ha-234759-m03" not found
	I0916 10:59:43.958943 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:43.959012 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759
	I0916 10:59:43.959023 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:43.959031 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:43.959038 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:43.962575 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:44.123639 2131642 request.go:632] Waited for 160.360523ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:44.123745 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:44.123758 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:44.123767 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:44.123772 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:44.126974 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:44.127854 2131642 pod_ready.go:93] pod "kube-apiserver-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:44.127876 2131642 pod_ready.go:82] duration metric: took 168.922228ms for pod "kube-apiserver-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:44.127891 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:44.323380 2131642 request.go:632] Waited for 195.400645ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:59:44.323463 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m02
	I0916 10:59:44.323472 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:44.323481 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:44.323491 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:44.326582 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:44.523958 2131642 request.go:632] Waited for 196.120536ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:59:44.524032 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:59:44.524046 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:44.524055 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:44.524061 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:44.527062 2131642 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 10:59:44.528025 2131642 pod_ready.go:93] pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:44.528049 2131642 pod_ready.go:82] duration metric: took 400.14916ms for pod "kube-apiserver-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:44.528078 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:44.723464 2131642 request.go:632] Waited for 195.314164ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m03
	I0916 10:59:44.723522 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-234759-m03
	I0916 10:59:44.723545 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:44.723561 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:44.723567 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:44.726589 2131642 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 10:59:44.726830 2131642 pod_ready.go:98] error getting pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace (skipping!): pods "kube-apiserver-ha-234759-m03" not found
	I0916 10:59:44.726853 2131642 pod_ready.go:82] duration metric: took 198.764423ms for pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	E0916 10:59:44.726865 2131642 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "kube-apiserver-ha-234759-m03" in "kube-system" namespace (skipping!): pods "kube-apiserver-ha-234759-m03" not found
	I0916 10:59:44.726880 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:44.923258 2131642 request.go:632] Waited for 196.260482ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:59:44.923322 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759
	I0916 10:59:44.923342 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:44.923351 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:44.923358 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:44.926558 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:45.124941 2131642 request.go:632] Waited for 197.464153ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:45.125113 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:45.125164 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:45.125202 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:45.125238 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:45.143538 2131642 round_trippers.go:574] Response Status: 200 OK in 18 milliseconds
	I0916 10:59:45.144217 2131642 pod_ready.go:93] pod "kube-controller-manager-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:45.144278 2131642 pod_ready.go:82] duration metric: took 417.364778ms for pod "kube-controller-manager-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:45.144309 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:45.323572 2131642 request.go:632] Waited for 179.144234ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:59:45.323724 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m02
	I0916 10:59:45.323755 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:45.323785 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:45.323803 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:45.328115 2131642 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:45.523251 2131642 request.go:632] Waited for 194.316932ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:59:45.523315 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:59:45.523321 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:45.523332 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:45.523339 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:45.526455 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:45.527159 2131642 pod_ready.go:93] pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:45.527181 2131642 pod_ready.go:82] duration metric: took 382.851159ms for pod "kube-controller-manager-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:45.527194 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:45.723545 2131642 request.go:632] Waited for 196.267882ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:59:45.723685 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-234759-m03
	I0916 10:59:45.723699 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:45.723709 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:45.723715 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:45.726895 2131642 round_trippers.go:574] Response Status: 404 Not Found in 3 milliseconds
	I0916 10:59:45.727096 2131642 pod_ready.go:98] error getting pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace (skipping!): pods "kube-controller-manager-ha-234759-m03" not found
	I0916 10:59:45.727141 2131642 pod_ready.go:82] duration metric: took 199.93853ms for pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	E0916 10:59:45.727167 2131642 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "kube-controller-manager-ha-234759-m03" in "kube-system" namespace (skipping!): pods "kube-controller-manager-ha-234759-m03" not found
	I0916 10:59:45.727190 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:45.923595 2131642 request.go:632] Waited for 196.315119ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:59:45.923656 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-f4jm2
	I0916 10:59:45.923666 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:45.923675 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:45.923681 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:45.926825 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:46.123316 2131642 request.go:632] Waited for 195.500698ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:59:46.123394 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:59:46.123401 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:46.123410 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:46.123416 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:46.126622 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:46.127288 2131642 pod_ready.go:93] pod "kube-proxy-f4jm2" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:46.127309 2131642 pod_ready.go:82] duration metric: took 400.0883ms for pod "kube-proxy-f4jm2" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:46.127322 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:46.323721 2131642 request.go:632] Waited for 196.334696ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:59:46.323780 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gwdl4
	I0916 10:59:46.323787 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:46.323795 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:46.323804 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:46.327319 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:46.523358 2131642 request.go:632] Waited for 195.340763ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:46.523437 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:46.523443 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:46.523452 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:46.523458 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:46.526724 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:46.527434 2131642 pod_ready.go:93] pod "kube-proxy-gwdl4" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:46.527460 2131642 pod_ready.go:82] duration metric: took 400.126871ms for pod "kube-proxy-gwdl4" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:46.527473 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-m84xg" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:46.723315 2131642 request.go:632] Waited for 195.724951ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:59:46.723382 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-m84xg
	I0916 10:59:46.723388 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:46.723397 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:46.723404 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:46.726845 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:46.922969 2131642 request.go:632] Waited for 195.300698ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:59:46.923038 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m04
	I0916 10:59:46.923044 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:46.923055 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:46.923072 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:46.926286 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:46.926816 2131642 pod_ready.go:93] pod "kube-proxy-m84xg" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:46.926832 2131642 pod_ready.go:82] duration metric: took 399.351941ms for pod "kube-proxy-m84xg" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:46.926843 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:47.123221 2131642 request.go:632] Waited for 196.2869ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:59:47.123300 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-qrdxc
	I0916 10:59:47.123311 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:47.123320 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:47.123326 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:47.126262 2131642 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 10:59:47.126381 2131642 pod_ready.go:98] error getting pod "kube-proxy-qrdxc" in "kube-system" namespace (skipping!): pods "kube-proxy-qrdxc" not found
	I0916 10:59:47.126400 2131642 pod_ready.go:82] duration metric: took 199.550649ms for pod "kube-proxy-qrdxc" in "kube-system" namespace to be "Ready" ...
	E0916 10:59:47.126410 2131642 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "kube-proxy-qrdxc" in "kube-system" namespace (skipping!): pods "kube-proxy-qrdxc" not found
	I0916 10:59:47.126425 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:47.323625 2131642 request.go:632] Waited for 197.126806ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:59:47.323698 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759
	I0916 10:59:47.323704 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:47.323713 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:47.323718 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:47.326859 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:47.523854 2131642 request.go:632] Waited for 196.38208ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:47.523923 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759
	I0916 10:59:47.523947 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:47.523956 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:47.523964 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:47.527245 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:47.528070 2131642 pod_ready.go:93] pod "kube-scheduler-ha-234759" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:47.528094 2131642 pod_ready.go:82] duration metric: took 401.657349ms for pod "kube-scheduler-ha-234759" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:47.528105 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:47.723594 2131642 request.go:632] Waited for 195.423306ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:59:47.723654 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m02
	I0916 10:59:47.723660 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:47.723669 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:47.723675 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:47.726909 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:47.922994 2131642 request.go:632] Waited for 195.244032ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:59:47.923158 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes/ha-234759-m02
	I0916 10:59:47.923184 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:47.923206 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:47.923240 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:47.927358 2131642 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 10:59:47.928028 2131642 pod_ready.go:93] pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace has status "Ready":"True"
	I0916 10:59:47.928050 2131642 pod_ready.go:82] duration metric: took 399.93647ms for pod "kube-scheduler-ha-234759-m02" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:47.928062 2131642 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	I0916 10:59:48.123530 2131642 request.go:632] Waited for 195.363457ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:59:48.123597 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-234759-m03
	I0916 10:59:48.123604 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:48.123613 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:48.123620 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:48.127156 2131642 round_trippers.go:574] Response Status: 404 Not Found in 3 milliseconds
	I0916 10:59:48.127307 2131642 pod_ready.go:98] error getting pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace (skipping!): pods "kube-scheduler-ha-234759-m03" not found
	I0916 10:59:48.127320 2131642 pod_ready.go:82] duration metric: took 199.250769ms for pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace to be "Ready" ...
	E0916 10:59:48.127331 2131642 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "kube-scheduler-ha-234759-m03" in "kube-system" namespace (skipping!): pods "kube-scheduler-ha-234759-m03" not found
	I0916 10:59:48.127340 2131642 pod_ready.go:39] duration metric: took 40.224287134s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 10:59:48.127355 2131642 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 10:59:48.127421 2131642 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:59:48.141099 2131642 system_svc.go:56] duration metric: took 13.72381ms WaitForService to wait for kubelet
	I0916 10:59:48.141131 2131642 kubeadm.go:582] duration metric: took 40.360945246s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 10:59:48.141184 2131642 node_conditions.go:102] verifying NodePressure condition ...
	I0916 10:59:48.323551 2131642 request.go:632] Waited for 182.27816ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.49.2:8443/api/v1/nodes
	I0916 10:59:48.323638 2131642 round_trippers.go:463] GET https://192.168.49.2:8443/api/v1/nodes
	I0916 10:59:48.323651 2131642 round_trippers.go:469] Request Headers:
	I0916 10:59:48.323661 2131642 round_trippers.go:473]     Accept: application/json, */*
	I0916 10:59:48.323667 2131642 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 10:59:48.327413 2131642 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 10:59:48.328711 2131642 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:59:48.328741 2131642 node_conditions.go:123] node cpu capacity is 2
	I0916 10:59:48.328754 2131642 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:59:48.328759 2131642 node_conditions.go:123] node cpu capacity is 2
	I0916 10:59:48.328763 2131642 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 10:59:48.328767 2131642 node_conditions.go:123] node cpu capacity is 2
	I0916 10:59:48.328772 2131642 node_conditions.go:105] duration metric: took 187.581606ms to run NodePressure ...
	I0916 10:59:48.328783 2131642 start.go:241] waiting for startup goroutines ...
	I0916 10:59:48.328804 2131642 start.go:255] writing updated cluster config ...
	I0916 10:59:48.329142 2131642 ssh_runner.go:195] Run: rm -f paused
	I0916 10:59:48.338076 2131642 out.go:177] * Done! kubectl is now configured to use "ha-234759" cluster and "default" namespace by default
	E0916 10:59:48.340470 2131642 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	c61312a09b661       2f6c962e7b831       44 seconds ago       Running             coredns                   2                   ceeb850b33772       coredns-7c65d6cfc9-vqj8q
	c6497535f1efc       2f6c962e7b831       45 seconds ago       Running             coredns                   2                   b454612a4cd16       coredns-7c65d6cfc9-2l4br
	79ab90be1d702       89a35e2ebb6b9       45 seconds ago       Running             busybox                   2                   2508c871e2ee1       busybox-7dff88458-kjr9x
	31b1572c29557       ba04bb24b9575       45 seconds ago       Exited              storage-provisioner       3                   e51bc4186db75       storage-provisioner
	cb1762879be7a       6a23fa8fd2b78       45 seconds ago       Running             kindnet-cni               2                   92e888006cdeb       kindnet-q8nl6
	f09770af5fb13       24a140c548c07       45 seconds ago       Running             kube-proxy                2                   b198595f99b91       kube-proxy-gwdl4
	50b2d7cc02f05       279f381cb3736       About a minute ago   Running             kube-controller-manager   2                   fd5f9b2024511       kube-controller-manager-ha-234759
	6ff5ec00d2669       7e2a4e229620b       About a minute ago   Running             kube-vip                  1                   bc0952609f42a       kube-vip-ha-234759
	cd4cd33e1fe07       d3f53a98c0a9d       About a minute ago   Running             kube-apiserver            2                   f109c845ee9f0       kube-apiserver-ha-234759
	e9cd978d969ac       7f8aa378bb47d       About a minute ago   Running             kube-scheduler            2                   83519cbee13e9       kube-scheduler-ha-234759
	d757d9490703e       27e3830e14027       About a minute ago   Running             etcd                      2                   c6e81ac288058       etcd-ha-234759
	41e6b84968e22       2f6c962e7b831       3 minutes ago        Exited              coredns                   1                   30fba837e76d0       coredns-7c65d6cfc9-vqj8q
	184672af3370a       89a35e2ebb6b9       3 minutes ago        Exited              busybox                   1                   f0ac92849bc1a       busybox-7dff88458-kjr9x
	d108cfac8fd4f       2f6c962e7b831       3 minutes ago        Exited              coredns                   1                   c40853737334c       coredns-7c65d6cfc9-2l4br
	9806835e6e060       6a23fa8fd2b78       3 minutes ago        Exited              kindnet-cni               1                   31ebd5494900f       kindnet-q8nl6
	e617f849070ed       24a140c548c07       3 minutes ago        Exited              kube-proxy                1                   43f22af3432c4       kube-proxy-gwdl4
	d4bb975eec45b       7f8aa378bb47d       3 minutes ago        Exited              kube-scheduler            1                   34a94e86a2890       kube-scheduler-ha-234759
	bfcc891f8e795       7e2a4e229620b       3 minutes ago        Exited              kube-vip                  0                   6997a47264a16       kube-vip-ha-234759
	b43574d882fbc       27e3830e14027       3 minutes ago        Exited              etcd                      1                   9b720087da357       etcd-ha-234759
	63b795aabed00       279f381cb3736       3 minutes ago        Exited              kube-controller-manager   1                   1c3a264f753d8       kube-controller-manager-ha-234759
	2b3270e2806d2       d3f53a98c0a9d       3 minutes ago        Exited              kube-apiserver            1                   194a5ade9b5b7       kube-apiserver-ha-234759
	
	
	==> containerd <==
	Sep 16 10:59:05 ha-234759 containerd[577]: time="2024-09-16T10:59:05.175120437Z" level=info msg="StartContainer for \"f09770af5fb13f0526867eaf0a963a06449582a58a5a3538aee9cc37a3db5257\" returns successfully"
	Sep 16 10:59:05 ha-234759 containerd[577]: time="2024-09-16T10:59:05.203975308Z" level=info msg="StartContainer for \"cb1762879be7a052c953d3f07070a09a9b8684654cd6870b8e6c2a3258154fbc\" returns successfully"
	Sep 16 10:59:05 ha-234759 containerd[577]: time="2024-09-16T10:59:05.248133628Z" level=info msg="CreateContainer within sandbox \"b454612a4cd169ed4881487dfd0d073216e6c5d0dcc8fa3e045ecb015b78ab08\" for &ContainerMetadata{Name:coredns,Attempt:2,} returns container id \"c6497535f1efc05bfa0e3f88cbe95f470c4f35a1b410ede6e69fd447f79d7468\""
	Sep 16 10:59:05 ha-234759 containerd[577]: time="2024-09-16T10:59:05.279862967Z" level=info msg="StartContainer for \"c6497535f1efc05bfa0e3f88cbe95f470c4f35a1b410ede6e69fd447f79d7468\""
	Sep 16 10:59:05 ha-234759 containerd[577]: time="2024-09-16T10:59:05.493276183Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-7c65d6cfc9-vqj8q,Uid:2c14618b-0831-4e8b-be9e-ba0049031bdb,Namespace:kube-system,Attempt:2,} returns sandbox id \"ceeb850b337725fd29f3c0a5789193aefa6e2b07fd0310eb4f8fb849e36e7fc8\""
	Sep 16 10:59:05 ha-234759 containerd[577]: time="2024-09-16T10:59:05.524956898Z" level=info msg="CreateContainer within sandbox \"ceeb850b337725fd29f3c0a5789193aefa6e2b07fd0310eb4f8fb849e36e7fc8\" for container &ContainerMetadata{Name:coredns,Attempt:2,}"
	Sep 16 10:59:05 ha-234759 containerd[577]: time="2024-09-16T10:59:05.649282163Z" level=info msg="StartContainer for \"31b1572c29557604197c1865c65736cce94b873b784ad2cbc6e7f5bfe801707c\" returns successfully"
	Sep 16 10:59:05 ha-234759 containerd[577]: time="2024-09-16T10:59:05.707270854Z" level=info msg="CreateContainer within sandbox \"ceeb850b337725fd29f3c0a5789193aefa6e2b07fd0310eb4f8fb849e36e7fc8\" for &ContainerMetadata{Name:coredns,Attempt:2,} returns container id \"c61312a09b661bc5a98c74618e4e818b007f409cdef02e7d297263cc773791a9\""
	Sep 16 10:59:05 ha-234759 containerd[577]: time="2024-09-16T10:59:05.722295502Z" level=info msg="StartContainer for \"c61312a09b661bc5a98c74618e4e818b007f409cdef02e7d297263cc773791a9\""
	Sep 16 10:59:05 ha-234759 containerd[577]: time="2024-09-16T10:59:05.937353742Z" level=info msg="StartContainer for \"c6497535f1efc05bfa0e3f88cbe95f470c4f35a1b410ede6e69fd447f79d7468\" returns successfully"
	Sep 16 10:59:06 ha-234759 containerd[577]: time="2024-09-16T10:59:06.022597008Z" level=info msg="StartContainer for \"79ab90be1d70250cf27c79f1612ee97db93916aefc25c58cea0e44d9279cccd6\" returns successfully"
	Sep 16 10:59:06 ha-234759 containerd[577]: time="2024-09-16T10:59:06.183747803Z" level=info msg="StartContainer for \"c61312a09b661bc5a98c74618e4e818b007f409cdef02e7d297263cc773791a9\" returns successfully"
	Sep 16 10:59:35 ha-234759 containerd[577]: time="2024-09-16T10:59:35.890975109Z" level=info msg="shim disconnected" id=31b1572c29557604197c1865c65736cce94b873b784ad2cbc6e7f5bfe801707c namespace=k8s.io
	Sep 16 10:59:35 ha-234759 containerd[577]: time="2024-09-16T10:59:35.891037820Z" level=warning msg="cleaning up after shim disconnected" id=31b1572c29557604197c1865c65736cce94b873b784ad2cbc6e7f5bfe801707c namespace=k8s.io
	Sep 16 10:59:35 ha-234759 containerd[577]: time="2024-09-16T10:59:35.891048093Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 10:59:35 ha-234759 containerd[577]: time="2024-09-16T10:59:35.991536392Z" level=info msg="RemoveContainer for \"35d6358938dea6788cef8042038c69dcd63a9de42f599cfc4b7797f2edc50f67\""
	Sep 16 10:59:36 ha-234759 containerd[577]: time="2024-09-16T10:59:36.002952469Z" level=info msg="RemoveContainer for \"35d6358938dea6788cef8042038c69dcd63a9de42f599cfc4b7797f2edc50f67\" returns successfully"
	Sep 16 10:59:38 ha-234759 containerd[577]: time="2024-09-16T10:59:38.504062992Z" level=info msg="StopPodSandbox for \"58d0e7de3b6eea0c0975d0a5d62a1bf749d5e7762cf1aeff37e6c952729a280c\""
	Sep 16 10:59:38 ha-234759 containerd[577]: time="2024-09-16T10:59:38.504177552Z" level=info msg="TearDown network for sandbox \"58d0e7de3b6eea0c0975d0a5d62a1bf749d5e7762cf1aeff37e6c952729a280c\" successfully"
	Sep 16 10:59:38 ha-234759 containerd[577]: time="2024-09-16T10:59:38.504190696Z" level=info msg="StopPodSandbox for \"58d0e7de3b6eea0c0975d0a5d62a1bf749d5e7762cf1aeff37e6c952729a280c\" returns successfully"
	Sep 16 10:59:38 ha-234759 containerd[577]: time="2024-09-16T10:59:38.504502999Z" level=info msg="RemovePodSandbox for \"58d0e7de3b6eea0c0975d0a5d62a1bf749d5e7762cf1aeff37e6c952729a280c\""
	Sep 16 10:59:38 ha-234759 containerd[577]: time="2024-09-16T10:59:38.504530453Z" level=info msg="Forcibly stopping sandbox \"58d0e7de3b6eea0c0975d0a5d62a1bf749d5e7762cf1aeff37e6c952729a280c\""
	Sep 16 10:59:38 ha-234759 containerd[577]: time="2024-09-16T10:59:38.504587298Z" level=info msg="TearDown network for sandbox \"58d0e7de3b6eea0c0975d0a5d62a1bf749d5e7762cf1aeff37e6c952729a280c\" successfully"
	Sep 16 10:59:38 ha-234759 containerd[577]: time="2024-09-16T10:59:38.510238016Z" level=warning msg="Failed to get podSandbox status for container event for sandboxID \"58d0e7de3b6eea0c0975d0a5d62a1bf749d5e7762cf1aeff37e6c952729a280c\": an error occurred when try to find sandbox: not found. Sending the event with nil podSandboxStatus."
	Sep 16 10:59:38 ha-234759 containerd[577]: time="2024-09-16T10:59:38.510379250Z" level=info msg="RemovePodSandbox \"58d0e7de3b6eea0c0975d0a5d62a1bf749d5e7762cf1aeff37e6c952729a280c\" returns successfully"
	
	
	==> coredns [41e6b84968e22184a2af0ae6cec735fddcf84294f353b02cf1533037a27300a2] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:44068 - 37292 "HINFO IN 6390764944663340044.6258342243358673423. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.017645978s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1734096306]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:56:34.155) (total time: 30000ms):
	Trace[1734096306]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:57:04.156)
	Trace[1734096306]: [30.000588643s] [30.000588643s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1653382229]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:56:34.156) (total time: 30000ms):
	Trace[1653382229]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:57:04.156)
	Trace[1653382229]: [30.000405859s] [30.000405859s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[126696912]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:56:34.155) (total time: 30001ms):
	Trace[126696912]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:57:04.156)
	Trace[126696912]: [30.001097645s] [30.001097645s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	
	
	==> coredns [c61312a09b661bc5a98c74618e4e818b007f409cdef02e7d297263cc773791a9] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:46425 - 15138 "HINFO IN 3049917123831606054.9189861399765994312. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.005854434s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1505978698]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:59:06.248) (total time: 30002ms):
	Trace[1505978698]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30002ms (10:59:36.251)
	Trace[1505978698]: [30.002465247s] [30.002465247s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[2087225275]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:59:06.250) (total time: 30000ms):
	Trace[2087225275]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:59:36.251)
	Trace[2087225275]: [30.00087688s] [30.00087688s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1320910141]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:59:06.251) (total time: 30000ms):
	Trace[1320910141]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:59:36.251)
	Trace[1320910141]: [30.000498568s] [30.000498568s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	
	
	==> coredns [c6497535f1efc05bfa0e3f88cbe95f470c4f35a1b410ede6e69fd447f79d7468] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:41648 - 52031 "HINFO IN 5590441155244517907.852065605070078171. udp 56 false 512" NXDOMAIN qr,rd,ra 56 0.038861725s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1144960510]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:59:05.974) (total time: 30000ms):
	Trace[1144960510]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:59:35.974)
	Trace[1144960510]: [30.000215015s] [30.000215015s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1646799217]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:59:05.973) (total time: 30000ms):
	Trace[1646799217]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:59:35.974)
	Trace[1646799217]: [30.000908239s] [30.000908239s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1240067345]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:59:05.974) (total time: 30000ms):
	Trace[1240067345]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:59:35.974)
	Trace[1240067345]: [30.000572774s] [30.000572774s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	
	
	==> coredns [d108cfac8fd4ff4eeef092b8bd57d5611a4f5a1e0f6ff03b515dd66901aea1da] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 05e3eaddc414b2d71a69b2e2bc6f2681fc1f4d04bcdd3acc1a41457bb7db518208b95ddfc4c9fffedc59c25a8faf458be1af4915a4a3c0d6777cb7a346bc5d86
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:49668 - 11463 "HINFO IN 672348644181696242.155275422572133107. udp 55 false 512" NXDOMAIN qr,rd,ra 55 0.013472072s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1609807867]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:56:34.111) (total time: 30001ms):
	Trace[1609807867]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (10:57:04.112)
	Trace[1609807867]: [30.001308162s] [30.001308162s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1774389382]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:56:34.111) (total time: 30000ms):
	Trace[1774389382]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:57:04.112)
	Trace[1774389382]: [30.000964944s] [30.000964944s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1459127489]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 10:56:34.111) (total time: 30001ms):
	Trace[1459127489]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (10:57:04.112)
	Trace[1459127489]: [30.001070585s] [30.001070585s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	
	
	==> describe nodes <==
	Name:               ha-234759
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T10_51_47_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:51:45 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:59:47 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:58:56 +0000   Mon, 16 Sep 2024 10:51:45 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:58:56 +0000   Mon, 16 Sep 2024 10:51:45 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:58:56 +0000   Mon, 16 Sep 2024 10:51:45 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:58:56 +0000   Mon, 16 Sep 2024 10:51:46 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.2
	  Hostname:    ha-234759
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 5687c3d1f5fb49fda6cbed8349daedeb
	  System UUID:                2a58ed5f-69e8-4ab8-a10e-2a95cf1d9dec
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (11 in total)
	  Namespace                   Name                                 CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                 ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-kjr9x              0 (0%)        0 (0%)      0 (0%)           0 (0%)         6m30s
	  kube-system                 coredns-7c65d6cfc9-2l4br             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     7m59s
	  kube-system                 coredns-7c65d6cfc9-vqj8q             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     7m59s
	  kube-system                 etcd-ha-234759                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         8m4s
	  kube-system                 kindnet-q8nl6                        100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      7m59s
	  kube-system                 kube-apiserver-ha-234759             250m (12%)    0 (0%)      0 (0%)           0 (0%)         8m4s
	  kube-system                 kube-controller-manager-ha-234759    200m (10%)    0 (0%)      0 (0%)           0 (0%)         8m4s
	  kube-system                 kube-proxy-gwdl4                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m59s
	  kube-system                 kube-scheduler-ha-234759             100m (5%)     0 (0%)      0 (0%)           0 (0%)         8m4s
	  kube-system                 kube-vip-ha-234759                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m19s
	  kube-system                 storage-provisioner                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m58s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                950m (47%)  100m (5%)
	  memory             290Mi (3%)  390Mi (4%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 44s                    kube-proxy       
	  Normal   Starting                 3m16s                  kube-proxy       
	  Normal   Starting                 7m58s                  kube-proxy       
	  Warning  CgroupV1                 8m4s                   kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  8m4s                   kubelet          Node ha-234759 status is now: NodeHasSufficientMemory
	  Normal   NodeReady                8m4s                   kubelet          Node ha-234759 status is now: NodeReady
	  Normal   NodeAllocatableEnforced  8m4s                   kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasNoDiskPressure    8m4s                   kubelet          Node ha-234759 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     8m4s                   kubelet          Node ha-234759 status is now: NodeHasSufficientPID
	  Normal   Starting                 8m4s                   kubelet          Starting kubelet.
	  Normal   RegisteredNode           8m                     node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           7m29s                  node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           6m45s                  node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           4m39s                  node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   NodeHasSufficientPID     3m46s (x7 over 3m46s)  kubelet          Node ha-234759 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  3m46s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   Starting                 3m46s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 3m46s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m46s (x8 over 3m46s)  kubelet          Node ha-234759 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m46s (x7 over 3m46s)  kubelet          Node ha-234759 status is now: NodeHasNoDiskPressure
	  Normal   RegisteredNode           3m30s                  node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           3m28s                  node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           3m10s                  node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   Starting                 72s                    kubelet          Starting kubelet.
	  Warning  CgroupV1                 72s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  72s (x8 over 72s)      kubelet          Node ha-234759 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    72s (x7 over 72s)      kubelet          Node ha-234759 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     72s (x7 over 72s)      kubelet          Node ha-234759 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  72s                    kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           56s                    node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	  Normal   RegisteredNode           54s                    node-controller  Node ha-234759 event: Registered Node ha-234759 in Controller
	
	
	Name:               ha-234759-m02
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_52_13_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:52:10 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:59:41 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:59:00 +0000   Mon, 16 Sep 2024 10:52:10 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:59:00 +0000   Mon, 16 Sep 2024 10:52:10 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:59:00 +0000   Mon, 16 Sep 2024 10:52:10 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:59:00 +0000   Mon, 16 Sep 2024 10:52:11 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.3
	  Hostname:    ha-234759-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 53b9956202664994a2be4d2313ba8d9d
	  System UUID:                ee72b9d9-548d-49fb-8dc5-aa6839abad7f
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-7l4g7                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         6m30s
	  kube-system                 etcd-ha-234759-m02                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         7m38s
	  kube-system                 kindnet-svsnq                            100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      7m40s
	  kube-system                 kube-apiserver-ha-234759-m02             250m (12%)    0 (0%)      0 (0%)           0 (0%)         7m39s
	  kube-system                 kube-controller-manager-ha-234759-m02    200m (10%)    0 (0%)      0 (0%)           0 (0%)         7m38s
	  kube-system                 kube-proxy-f4jm2                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m40s
	  kube-system                 kube-scheduler-ha-234759-m02             100m (5%)     0 (0%)      0 (0%)           0 (0%)         7m39s
	  kube-system                 kube-vip-ha-234759-m02                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         7m35s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (1%)  50Mi (0%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 3m15s                  kube-proxy       
	  Normal   Starting                 7m32s                  kube-proxy       
	  Normal   Starting                 4m28s                  kube-proxy       
	  Normal   Starting                 45s                    kube-proxy       
	  Normal   RegisteredNode           7m40s                  node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   NodeHasSufficientPID     7m40s (x7 over 7m40s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  7m40s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   Starting                 7m40s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 7m40s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  7m40s (x8 over 7m40s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    7m40s (x7 over 7m40s)  kubelet          Node ha-234759-m02 status is now: NodeHasNoDiskPressure
	  Normal   RegisteredNode           7m29s                  node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   RegisteredNode           6m45s                  node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   NodeHasSufficientMemory  4m51s (x8 over 4m51s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasSufficientPID     4m51s (x7 over 4m51s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientPID
	  Normal   Starting                 4m51s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 4m51s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  4m51s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasNoDiskPressure    4m51s (x7 over 4m51s)  kubelet          Node ha-234759-m02 status is now: NodeHasNoDiskPressure
	  Normal   RegisteredNode           4m39s                  node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Warning  CgroupV1                 3m43s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasNoDiskPressure    3m43s (x7 over 3m43s)  kubelet          Node ha-234759-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeAllocatableEnforced  3m43s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   Starting                 3m43s                  kubelet          Starting kubelet.
	  Normal   NodeHasSufficientPID     3m43s (x7 over 3m43s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  3m43s (x8 over 3m43s)  kubelet          Node ha-234759-m02 status is now: NodeHasSufficientMemory
	  Normal   RegisteredNode           3m30s                  node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   RegisteredNode           3m28s                  node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   RegisteredNode           3m10s                  node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   Starting                 70s                    kubelet          Starting kubelet.
	  Warning  CgroupV1                 70s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  70s (x8 over 70s)      kubelet          Node ha-234759-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    70s (x7 over 70s)      kubelet          Node ha-234759-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     70s (x7 over 70s)      kubelet          Node ha-234759-m02 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  70s                    kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           56s                    node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	  Normal   RegisteredNode           54s                    node-controller  Node ha-234759-m02 event: Registered Node ha-234759-m02 in Controller
	
	
	Name:               ha-234759-m04
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=ha-234759-m04
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=ha-234759
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T10_54_13_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 10:54:12 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-234759-m04
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 10:59:45 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 10:59:14 +0000   Mon, 16 Sep 2024 10:57:32 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 10:59:14 +0000   Mon, 16 Sep 2024 10:57:32 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 10:59:14 +0000   Mon, 16 Sep 2024 10:57:32 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 10:59:14 +0000   Mon, 16 Sep 2024 10:57:32 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.49.5
	  Hostname:    ha-234759-m04
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 b829c34f42b34714abdfd0a30824dc8e
	  System UUID:                3f4e61b4-061e-4448-a9f3-3c0401d9b215
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.3.0/24
	PodCIDRs:                     10.244.3.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                       CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                       ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-n5dcg    0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m8s
	  kube-system                 kindnet-lwtj4              100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      5m38s
	  kube-system                 kube-proxy-m84xg           0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m38s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 5m36s                  kube-proxy       
	  Normal   Starting                 20s                    kube-proxy       
	  Normal   Starting                 2m11s                  kube-proxy       
	  Normal   NodeHasNoDiskPressure    5m38s (x2 over 5m38s)  kubelet          Node ha-234759-m04 status is now: NodeHasNoDiskPressure
	  Normal   NodeAllocatableEnforced  5m38s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientPID     5m38s (x2 over 5m38s)  kubelet          Node ha-234759-m04 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  5m38s (x2 over 5m38s)  kubelet          Node ha-234759-m04 status is now: NodeHasSufficientMemory
	  Normal   NodeReady                5m37s                  kubelet          Node ha-234759-m04 status is now: NodeReady
	  Normal   RegisteredNode           5m35s                  node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   RegisteredNode           5m35s                  node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   RegisteredNode           5m34s                  node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   RegisteredNode           4m39s                  node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   RegisteredNode           3m30s                  node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   RegisteredNode           3m28s                  node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   RegisteredNode           3m10s                  node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   NodeNotReady             2m50s                  node-controller  Node ha-234759-m04 status is now: NodeNotReady
	  Warning  CgroupV1                 2m30s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  2m30s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   Starting                 2m30s                  kubelet          Starting kubelet.
	  Normal   NodeHasNoDiskPressure    2m24s (x7 over 2m30s)  kubelet          Node ha-234759-m04 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m24s (x7 over 2m30s)  kubelet          Node ha-234759-m04 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  2m18s (x8 over 2m30s)  kubelet          Node ha-234759-m04 status is now: NodeHasSufficientMemory
	  Normal   RegisteredNode           56s                    node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Normal   RegisteredNode           54s                    node-controller  Node ha-234759-m04 event: Registered Node ha-234759-m04 in Controller
	  Warning  CgroupV1                 49s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   Starting                 49s                    kubelet          Starting kubelet.
	  Normal   NodeAllocatableEnforced  48s                    kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasNoDiskPressure    42s (x7 over 49s)      kubelet          Node ha-234759-m04 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     42s (x7 over 49s)      kubelet          Node ha-234759-m04 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  36s (x8 over 49s)      kubelet          Node ha-234759-m04 status is now: NodeHasSufficientMemory
	
	
	==> dmesg <==
	
	
	==> etcd [b43574d882fbc3d7cc3c3eb1f0448a517af58314b6d7ebfedb1cf5aeac8827c8] <==
	{"level":"info","ts":"2024-09-16T10:58:15.837278Z","caller":"traceutil/trace.go:171","msg":"trace[799599524] range","detail":"{range_begin:/registry/health; range_end:; }","duration":"1.99113415s","start":"2024-09-16T10:58:13.846133Z","end":"2024-09-16T10:58:15.837268Z","steps":["trace[799599524] 'agreement among raft nodes before linearized reading'  (duration: 1.991074557s)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:58:15.837843Z","caller":"v3rpc/interceptor.go:197","msg":"request stats","start time":"2024-09-16T10:58:13.846103Z","time spent":"1.991196796s","remote":"127.0.0.1:59158","response type":"/etcdserverpb.KV/Range","request count":0,"request size":18,"response count":0,"response size":0,"request content":"key:\"/registry/health\" "}
	2024/09/16 10:58:15 WARNING: [core] [Server #8] grpc: Server.processUnaryRPC failed to write status: connection error: desc = "transport is closing"
	{"level":"warn","ts":"2024-09-16T10:58:16.323997Z","caller":"etcdserver/v3_server.go:920","msg":"waiting for ReadIndex response took too long, retrying","sent-request-id":8128031932468417135,"retry-timeout":"500ms"}
	{"level":"info","ts":"2024-09-16T10:58:16.547065Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 3"}
	{"level":"info","ts":"2024-09-16T10:58:16.547115Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:58:16.547128Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:58:16.547143Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc [logterm: 3, index: 2582] sent MsgPreVote request to 3b59db4913cc3eb9 at term 3"}
	{"level":"warn","ts":"2024-09-16T10:58:16.824352Z","caller":"etcdserver/v3_server.go:920","msg":"waiting for ReadIndex response took too long, retrying","sent-request-id":8128031932468417135,"retry-timeout":"500ms"}
	{"level":"warn","ts":"2024-09-16T10:58:17.326734Z","caller":"etcdserver/v3_server.go:920","msg":"waiting for ReadIndex response took too long, retrying","sent-request-id":8128031932468417135,"retry-timeout":"500ms"}
	{"level":"warn","ts":"2024-09-16T10:58:17.827035Z","caller":"etcdserver/v3_server.go:920","msg":"waiting for ReadIndex response took too long, retrying","sent-request-id":8128031932468417135,"retry-timeout":"500ms"}
	{"level":"warn","ts":"2024-09-16T10:58:17.851324Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"2.000689217s","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/health\" ","response":"","error":"context deadline exceeded"}
	{"level":"info","ts":"2024-09-16T10:58:17.851398Z","caller":"traceutil/trace.go:171","msg":"trace[2122131564] range","detail":"{range_begin:/registry/health; range_end:; }","duration":"2.00084193s","start":"2024-09-16T10:58:15.850544Z","end":"2024-09-16T10:58:17.851386Z","steps":["trace[2122131564] 'agreement among raft nodes before linearized reading'  (duration: 2.000687174s)"],"step_count":1}
	{"level":"warn","ts":"2024-09-16T10:58:17.851482Z","caller":"v3rpc/interceptor.go:197","msg":"request stats","start time":"2024-09-16T10:58:15.850509Z","time spent":"2.000963324s","remote":"127.0.0.1:59152","response type":"/etcdserverpb.KV/Range","request count":0,"request size":18,"response count":0,"response size":0,"request content":"key:\"/registry/health\" "}
	{"level":"warn","ts":"2024-09-16T10:58:17.876481Z","caller":"rafthttp/probing_status.go:68","msg":"prober detected unhealthy status","round-tripper-name":"ROUND_TRIPPER_SNAPSHOT","remote-peer-id":"3b59db4913cc3eb9","rtt":"2.241849ms","error":"dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"warn","ts":"2024-09-16T10:58:17.880054Z","caller":"rafthttp/probing_status.go:68","msg":"prober detected unhealthy status","round-tripper-name":"ROUND_TRIPPER_RAFT_MESSAGE","remote-peer-id":"3b59db4913cc3eb9","rtt":"18.935976ms","error":"dial tcp 192.168.49.3:2380: connect: connection refused"}
	{"level":"info","ts":"2024-09-16T10:58:18.050474Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc is starting a new election at term 3"}
	{"level":"info","ts":"2024-09-16T10:58:18.050544Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became pre-candidate at term 3"}
	{"level":"info","ts":"2024-09-16T10:58:18.050558Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc received MsgPreVoteResp from aec36adc501070cc at term 3"}
	{"level":"info","ts":"2024-09-16T10:58:18.051302Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc [logterm: 3, index: 2582] sent MsgPreVote request to 3b59db4913cc3eb9 at term 3"}
	{"level":"warn","ts":"2024-09-16T10:58:18.257975Z","caller":"v3rpc/interceptor.go:197","msg":"request stats","start time":"2024-09-16T10:58:11.257893Z","time spent":"7.000077705s","remote":"127.0.0.1:59866","response type":"/etcdserverpb.KV/Txn","request count":0,"request size":0,"response count":0,"response size":0,"request content":""}
	{"level":"warn","ts":"2024-09-16T10:58:18.335716Z","caller":"etcdserver/v3_server.go:920","msg":"waiting for ReadIndex response took too long, retrying","sent-request-id":8128031932468417135,"retry-timeout":"500ms"}
	{"level":"warn","ts":"2024-09-16T10:58:18.856745Z","caller":"etcdserver/v3_server.go:920","msg":"waiting for ReadIndex response took too long, retrying","sent-request-id":8128031932468417135,"retry-timeout":"500ms"}
	{"level":"warn","ts":"2024-09-16T10:58:18.856894Z","caller":"v3rpc/interceptor.go:197","msg":"request stats","start time":"2024-09-16T10:58:11.839610Z","time spent":"7.01728033s","remote":"127.0.0.1:59396","response type":"/etcdserverpb.KV/Txn","request count":0,"request size":0,"response count":0,"response size":0,"request content":""}
	{"level":"warn","ts":"2024-09-16T10:58:19.114706Z","caller":"v3rpc/interceptor.go:197","msg":"request stats","start time":"2024-09-16T10:58:12.101326Z","time spent":"7.013373147s","remote":"127.0.0.1:59866","response type":"/etcdserverpb.KV/Txn","request count":0,"request size":0,"response count":0,"response size":0,"request content":""}
	
	
	==> etcd [d757d9490703eb472cc0184037f6e5f096f33afcfa77d8232b0c26be4779d27e] <==
	{"level":"info","ts":"2024-09-16T10:58:45.720051Z","caller":"rafthttp/pipeline.go:85","msg":"stopped HTTP pipelining with remote peer","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:58:45.720130Z","caller":"rafthttp/stream.go:442","msg":"stopped stream reader with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:58:45.720176Z","caller":"rafthttp/stream.go:442","msg":"stopped stream reader with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:58:45.720190Z","caller":"rafthttp/peer.go:335","msg":"stopped remote peer","remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:58:45.720200Z","caller":"rafthttp/transport.go:355","msg":"removed remote peer","local-member-id":"aec36adc501070cc","removed-remote-peer-id":"94f3900974800f10"}
	{"level":"info","ts":"2024-09-16T10:58:45.912199Z","caller":"rafthttp/stream.go:249","msg":"set message encoder","from":"aec36adc501070cc","to":"3b59db4913cc3eb9","stream-type":"stream MsgApp v2"}
	{"level":"info","ts":"2024-09-16T10:58:45.912328Z","caller":"rafthttp/stream.go:274","msg":"established TCP streaming connection with remote peer","stream-writer-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"3b59db4913cc3eb9"}
	{"level":"info","ts":"2024-09-16T10:58:45.926863Z","caller":"rafthttp/stream.go:412","msg":"established TCP streaming connection with remote peer","stream-reader-type":"stream MsgApp v2","local-member-id":"aec36adc501070cc","remote-peer-id":"3b59db4913cc3eb9"}
	{"level":"info","ts":"2024-09-16T10:58:46.205183Z","caller":"rafthttp/stream.go:412","msg":"established TCP streaming connection with remote peer","stream-reader-type":"stream Message","local-member-id":"aec36adc501070cc","remote-peer-id":"3b59db4913cc3eb9"}
	{"level":"info","ts":"2024-09-16T10:58:46.237730Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc [logterm: 3, index: 2582, vote: 3b59db4913cc3eb9] cast MsgPreVote for 3b59db4913cc3eb9 [logterm: 3, index: 2582] at term 3"}
	{"level":"info","ts":"2024-09-16T10:58:46.290796Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc [term: 3] received a MsgVote message with higher term from 3b59db4913cc3eb9 [term: 4]"}
	{"level":"info","ts":"2024-09-16T10:58:46.292002Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc became follower at term 4"}
	{"level":"info","ts":"2024-09-16T10:58:46.292073Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"aec36adc501070cc [logterm: 3, index: 2582, vote: 0] cast MsgVote for 3b59db4913cc3eb9 [logterm: 3, index: 2582] at term 4"}
	{"level":"info","ts":"2024-09-16T10:58:46.314872Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: aec36adc501070cc elected leader 3b59db4913cc3eb9 at term 4"}
	{"level":"info","ts":"2024-09-16T10:58:46.367081Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"aec36adc501070cc","local-member-attributes":"{Name:ha-234759 ClientURLs:[https://192.168.49.2:2379]}","request-path":"/0/members/aec36adc501070cc/attributes","cluster-id":"fa54960ea34d58be","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T10:58:46.377660Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:58:46.378174Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T10:58:46.388589Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T10:58:46.411383Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T10:58:46.389290Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:58:46.412417Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.49.2:2379"}
	{"level":"info","ts":"2024-09-16T10:58:46.413015Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T10:58:46.413829Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"warn","ts":"2024-09-16T10:59:04.057776Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"108.823803ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/services/endpoints/kube-system/kube-dns\" ","response":"range_response_count:1 size:797"}
	{"level":"info","ts":"2024-09-16T10:59:04.057843Z","caller":"traceutil/trace.go:171","msg":"trace[408811029] range","detail":"{range_begin:/registry/services/endpoints/kube-system/kube-dns; range_end:; response_count:1; response_revision:2273; }","duration":"108.906067ms","start":"2024-09-16T10:59:03.948924Z","end":"2024-09-16T10:59:04.057831Z","steps":["trace[408811029] 'agreement among raft nodes before linearized reading'  (duration: 108.732603ms)"],"step_count":1}
	
	
	==> kernel <==
	 10:59:50 up 1 day, 14:42,  0 users,  load average: 3.41, 3.14, 2.22
	Linux ha-234759 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [9806835e6e060a0abfcf806d43a4b9a66b69879aff1b10ccf545e0ee3642321a] <==
	I0916 10:57:44.024974       1 main.go:322] Node ha-234759-m03 has CIDR [10.244.2.0/24] 
	I0916 10:57:44.025194       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:57:44.025211       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:57:44.025254       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:57:44.025270       1 main.go:299] handling current node
	I0916 10:57:44.025283       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:57:44.025293       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:57:54.024387       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:57:54.024677       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:57:54.024906       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:57:54.025010       1 main.go:299] handling current node
	I0916 10:57:54.025096       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:57:54.025190       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:58:04.031025       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:58:04.031133       1 main.go:299] handling current node
	I0916 10:58:04.031170       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:58:04.031206       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:58:04.031392       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:58:04.031409       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:58:14.030786       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:58:14.030911       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:58:14.031143       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:58:14.031195       1 main.go:299] handling current node
	I0916 10:58:14.031241       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:58:14.031288       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	
	
	==> kindnet [cb1762879be7a052c953d3f07070a09a9b8684654cd6870b8e6c2a3258154fbc] <==
	I0916 10:59:35.737743       1 trace.go:236] Trace[739816659]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 10:59:05.732) (total time: 30005ms):
	Trace[739816659]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30005ms (10:59:35.737)
	Trace[739816659]: [30.005615556s] [30.005615556s] END
	E0916 10:59:35.737754       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Pod: failed to list *v1.Pod: Get "https://10.96.0.1:443/api/v1/pods?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	W0916 10:59:35.737809       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.NetworkPolicy: Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 10:59:35.737852       1 trace.go:236] Trace[1306732696]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 10:59:05.732) (total time: 30005ms):
	Trace[1306732696]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30005ms (10:59:35.737)
	Trace[1306732696]: [30.005401993s] [30.005401993s] END
	E0916 10:59:35.737867       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.NetworkPolicy: failed to list *v1.NetworkPolicy: Get "https://10.96.0.1:443/apis/networking.k8s.io/v1/networkpolicies?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	W0916 10:59:35.738036       1 reflector.go:547] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: failed to list *v1.Node: Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 10:59:35.738175       1 trace.go:236] Trace[1734503169]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232 (16-Sep-2024 10:59:05.727) (total time: 30010ms):
	Trace[1734503169]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30010ms (10:59:35.737)
	Trace[1734503169]: [30.010319734s] [30.010319734s] END
	E0916 10:59:35.738209       1 reflector.go:150] pkg/mod/k8s.io/client-go@v0.30.3/tools/cache/reflector.go:232: Failed to watch *v1.Node: failed to list *v1.Node: Get "https://10.96.0.1:443/api/v1/nodes?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	I0916 10:59:37.128053       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 10:59:37.128083       1 metrics.go:61] Registering metrics
	I0916 10:59:37.128148       1 controller.go:374] Syncing nftables rules
	I0916 10:59:45.724663       1 main.go:295] Handling node with IPs: map[192.168.49.2:{}]
	I0916 10:59:45.724706       1 main.go:299] handling current node
	I0916 10:59:45.730411       1 main.go:295] Handling node with IPs: map[192.168.49.3:{}]
	I0916 10:59:45.730520       1 main.go:322] Node ha-234759-m02 has CIDR [10.244.1.0/24] 
	I0916 10:59:45.730749       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.1.0/24 Src: <nil> Gw: 192.168.49.3 Flags: [] Table: 0} 
	I0916 10:59:45.730840       1 main.go:295] Handling node with IPs: map[192.168.49.5:{}]
	I0916 10:59:45.730856       1 main.go:322] Node ha-234759-m04 has CIDR [10.244.3.0/24] 
	I0916 10:59:45.730897       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.3.0/24 Src: <nil> Gw: 192.168.49.5 Flags: [] Table: 0} 
	
	
	==> kube-apiserver [2b3270e2806d2a49ec7811927861d7e576fa169288c5b4860691e358f1febfa3] <==
	E0916 10:58:19.188808       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.188827       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.189042       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.192095       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.192218       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.192284       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.192304       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.192325       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.192342       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.192703       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.192732       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.192750       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.192855       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.192875       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.195015       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.195042       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.195059       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.195075       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.195499       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.200164       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.200808       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.200829       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.200844       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.200856       1 watcher.go:342] watch chan error: etcdserver: no leader
	E0916 10:58:19.200866       1 watcher.go:342] watch chan error: etcdserver: no leader
	
	
	==> kube-apiserver [cd4cd33e1fe07356699bc9541b83744263fb2d328315ff16ef02c93b94d5ea11] <==
	I0916 10:58:53.232502       1 apiapproval_controller.go:189] Starting KubernetesAPIApprovalPolicyConformantConditionController
	I0916 10:58:53.232513       1 crd_finalizer.go:269] Starting CRDFinalizer
	I0916 10:58:53.469460       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 10:58:53.469501       1 policy_source.go:224] refreshing policies
	I0916 10:58:53.488163       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 10:58:53.505037       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 10:58:53.505390       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 10:58:53.505658       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 10:58:53.506435       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 10:58:53.506520       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 10:58:53.506564       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 10:58:53.506870       1 aggregator.go:171] initial CRD sync complete...
	I0916 10:58:53.506891       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 10:58:53.506897       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 10:58:53.506904       1 cache.go:39] Caches are synced for autoregister controller
	I0916 10:58:53.511302       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 10:58:53.523551       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 10:58:53.523856       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 10:58:53.524374       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	W0916 10:58:53.600171       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.3]
	I0916 10:58:53.602643       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 10:58:53.616693       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	E0916 10:58:53.620726       1 controller.go:95] Found stale data, removed previous endpoints on kubernetes service, apiserver didn't exit successfully previously
	I0916 10:58:54.226777       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	W0916 10:58:55.036669       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.49.2 192.168.49.3]
	
	
	==> kube-controller-manager [50b2d7cc02f05ce3fe5f838266276f662d16f76c46f3032e470731010bd79b4e] <==
	I0916 10:59:06.975700       1 event.go:377] Event(v1.ObjectReference{Kind:"Service", Namespace:"kube-system", Name:"kube-dns", UID:"2767a06f-f40b-42fe-bba8-c52234ffe17b", APIVersion:"v1", ResourceVersion:"245", FieldPath:""}): type: 'Warning' reason: 'FailedToUpdateEndpointSlices' Error updating Endpoint Slices for Service kube-system/kube-dns: failed to update kube-dns-kbqtc EndpointSlice for Service kube-system/kube-dns: Operation cannot be fulfilled on endpointslices.discovery.k8s.io "kube-dns-kbqtc": the object has been modified; please apply your changes to the latest version and try again
	I0916 10:59:07.086201       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="59.133819ms"
	I0916 10:59:07.086411       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="94.391µs"
	I0916 10:59:14.796432       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	E0916 10:59:16.663936       1 gc_controller.go:151] "Failed to get node" err="node \"ha-234759-m03\" not found" logger="pod-garbage-collector-controller" node="ha-234759-m03"
	E0916 10:59:16.664001       1 gc_controller.go:151] "Failed to get node" err="node \"ha-234759-m03\" not found" logger="pod-garbage-collector-controller" node="ha-234759-m03"
	E0916 10:59:16.664008       1 gc_controller.go:151] "Failed to get node" err="node \"ha-234759-m03\" not found" logger="pod-garbage-collector-controller" node="ha-234759-m03"
	E0916 10:59:16.664015       1 gc_controller.go:151] "Failed to get node" err="node \"ha-234759-m03\" not found" logger="pod-garbage-collector-controller" node="ha-234759-m03"
	E0916 10:59:16.664020       1 gc_controller.go:151] "Failed to get node" err="node \"ha-234759-m03\" not found" logger="pod-garbage-collector-controller" node="ha-234759-m03"
	E0916 10:59:16.664025       1 gc_controller.go:151] "Failed to get node" err="node \"ha-234759-m03\" not found" logger="pod-garbage-collector-controller" node="ha-234759-m03"
	I0916 10:59:17.019904       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="70.905898ms"
	I0916 10:59:17.020080       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="70.285µs"
	I0916 10:59:18.110910       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="160.122µs"
	I0916 10:59:34.181131       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="34.616954ms"
	I0916 10:59:34.181307       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="133.776µs"
	I0916 10:59:34.917298       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="65.904µs"
	I0916 10:59:34.921898       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="93.3µs"
	I0916 10:59:43.675846       1 endpointslice_controller.go:344] "Error syncing endpoint slices for service, retrying" logger="endpointslice-controller" key="kube-system/kube-dns" err="failed to update kube-dns-kbqtc EndpointSlice for Service kube-system/kube-dns: Operation cannot be fulfilled on endpointslices.discovery.k8s.io \"kube-dns-kbqtc\": the object has been modified; please apply your changes to the latest version and try again"
	I0916 10:59:43.676540       1 event.go:377] Event(v1.ObjectReference{Kind:"Service", Namespace:"kube-system", Name:"kube-dns", UID:"2767a06f-f40b-42fe-bba8-c52234ffe17b", APIVersion:"v1", ResourceVersion:"245", FieldPath:""}): type: 'Warning' reason: 'FailedToUpdateEndpointSlices' Error updating Endpoint Slices for Service kube-system/kube-dns: failed to update kube-dns-kbqtc EndpointSlice for Service kube-system/kube-dns: Operation cannot be fulfilled on endpointslices.discovery.k8s.io "kube-dns-kbqtc": the object has been modified; please apply your changes to the latest version and try again
	I0916 10:59:43.783452       1 endpointslice_controller.go:344] "Error syncing endpoint slices for service, retrying" logger="endpointslice-controller" key="kube-system/kube-dns" err="failed to update kube-dns-kbqtc EndpointSlice for Service kube-system/kube-dns: Operation cannot be fulfilled on endpointslices.discovery.k8s.io \"kube-dns-kbqtc\": the object has been modified; please apply your changes to the latest version and try again"
	I0916 10:59:43.784012       1 event.go:377] Event(v1.ObjectReference{Kind:"Service", Namespace:"kube-system", Name:"kube-dns", UID:"2767a06f-f40b-42fe-bba8-c52234ffe17b", APIVersion:"v1", ResourceVersion:"245", FieldPath:""}): type: 'Warning' reason: 'FailedToUpdateEndpointSlices' Error updating Endpoint Slices for Service kube-system/kube-dns: failed to update kube-dns-kbqtc EndpointSlice for Service kube-system/kube-dns: Operation cannot be fulfilled on endpointslices.discovery.k8s.io "kube-dns-kbqtc": the object has been modified; please apply your changes to the latest version and try again
	I0916 10:59:43.787780       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="172.321985ms"
	E0916 10:59:43.788198       1 replica_set.go:560] "Unhandled Error" err="sync \"kube-system/coredns-7c65d6cfc9\" failed with Operation cannot be fulfilled on replicasets.apps \"coredns-7c65d6cfc9\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	I0916 10:59:43.790571       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="97.829µs"
	I0916 10:59:43.796545       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="105.296µs"
	
	
	==> kube-controller-manager [63b795aabed00b1c546bdfc8a236583357fe63341f1c55f86a8e1bf68afb7aee] <==
	I0916 10:57:11.935294       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="142.178µs"
	I0916 10:57:32.962054       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:57:32.963438       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="ha-234759-m04"
	I0916 10:57:32.985902       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:57:35.523111       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	I0916 10:57:41.939510       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m03"
	I0916 10:57:41.950177       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m03"
	I0916 10:57:42.393560       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="380.098147ms"
	E0916 10:57:42.393854       1 replica_set.go:560] "Unhandled Error" err="sync \"default/busybox-7dff88458\" failed with Operation cannot be fulfilled on replicasets.apps \"busybox-7dff88458\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	I0916 10:57:42.438427       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="38.300406ms"
	I0916 10:57:42.523104       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="84.621504ms"
	I0916 10:57:42.523227       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="82.986µs"
	I0916 10:57:44.258033       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="48.976µs"
	I0916 10:57:45.271583       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="31.413745ms"
	I0916 10:57:45.271768       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="116.824µs"
	I0916 10:57:46.391801       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m03"
	I0916 10:57:46.393901       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="ha-234759-m04"
	E0916 10:57:46.574496       1 garbagecollector.go:399] "Unhandled Error" err="error syncing item &garbagecollector.node{identity:garbagecollector.objectReference{OwnerReference:v1.OwnerReference{APIVersion:\"storage.k8s.io/v1\", Kind:\"CSINode\", Name:\"ha-234759-m03\", UID:\"0a40139b-8a68-4e34-b51d-18b2a74252fd\", Controller:(*bool)(nil), BlockOwnerDeletion:(*bool)(nil)}, Namespace:\"\"}, dependentsLock:sync.RWMutex{w:sync.Mutex{state:0, sema:0x0}, writerSem:0x0, readerSem:0x0, readerCount:atomic.Int32{_:atomic.noCopy{}, v:1}, readerWait:atomic.Int32{_:atomic.noCopy{}, v:0}}, dependents:map[*garbagecollector.node]struct {}{}, deletingDependents:false, deletingDependentsLock:sync.RWMutex{w:sync.Mutex{state:0, sema:0x0}, writerSem:0x0, readerSem:0x0, readerCount:atomic.Int32{_:atomic.noCopy{}, v:0}, readerWait:atomic.Int32{_:atomic.noCopy{}, v:0}}, beingDeleted:false, beingDeletedLock:sync.RWMutex{w:sync.Mutex{state:0, sema:0x0}, writerSem:0x0, readerSem:0x0, readerCount:atomic.Int32{_:atomic.noCopy{}
, v:0}, readerWait:atomic.Int32{_:atomic.noCopy{}, v:0}}, virtual:false, virtualLock:sync.RWMutex{w:sync.Mutex{state:0, sema:0x0}, writerSem:0x0, readerSem:0x0, readerCount:atomic.Int32{_:atomic.noCopy{}, v:0}, readerWait:atomic.Int32{_:atomic.noCopy{}, v:0}}, owners:[]v1.OwnerReference{v1.OwnerReference{APIVersion:\"v1\", Kind:\"Node\", Name:\"ha-234759-m03\", UID:\"f54fa22f-9520-483c-a3ac-d5fc9a1607e6\", Controller:(*bool)(nil), BlockOwnerDeletion:(*bool)(nil)}}}: csinodes.storage.k8s.io \"ha-234759-m03\" not found" logger="UnhandledError"
	I0916 10:57:53.653316       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="ha-234759-m04"
	E0916 10:58:02.498931       1 gc_controller.go:151] "Failed to get node" err="node \"ha-234759-m03\" not found" logger="pod-garbage-collector-controller" node="ha-234759-m03"
	E0916 10:58:02.498982       1 gc_controller.go:151] "Failed to get node" err="node \"ha-234759-m03\" not found" logger="pod-garbage-collector-controller" node="ha-234759-m03"
	E0916 10:58:02.498990       1 gc_controller.go:151] "Failed to get node" err="node \"ha-234759-m03\" not found" logger="pod-garbage-collector-controller" node="ha-234759-m03"
	E0916 10:58:02.498996       1 gc_controller.go:151] "Failed to get node" err="node \"ha-234759-m03\" not found" logger="pod-garbage-collector-controller" node="ha-234759-m03"
	E0916 10:58:02.499008       1 gc_controller.go:151] "Failed to get node" err="node \"ha-234759-m03\" not found" logger="pod-garbage-collector-controller" node="ha-234759-m03"
	E0916 10:58:02.499016       1 gc_controller.go:151] "Failed to get node" err="node \"ha-234759-m03\" not found" logger="pod-garbage-collector-controller" node="ha-234759-m03"
	
	
	==> kube-proxy [e617f849070edf4c16b86d0792dd8cbf9f27eacef983569f922b9f407467c670] <==
	I0916 10:56:33.475454       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:56:33.972451       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:56:33.972524       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:56:34.173072       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:56:34.173415       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:56:34.179384       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:56:34.179904       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:56:34.186538       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:56:34.188077       1 config.go:199] "Starting service config controller"
	I0916 10:56:34.188310       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:56:34.189069       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:56:34.189177       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:56:34.189890       1 config.go:328] "Starting node config controller"
	I0916 10:56:34.190035       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:56:34.289961       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:56:34.290244       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:56:34.290649       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [f09770af5fb13f0526867eaf0a963a06449582a58a5a3538aee9cc37a3db5257] <==
	I0916 10:59:05.537646       1 server_linux.go:66] "Using iptables proxy"
	I0916 10:59:06.068400       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.49.2"]
	E0916 10:59:06.068541       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 10:59:06.283541       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 10:59:06.283602       1 server_linux.go:169] "Using iptables Proxier"
	I0916 10:59:06.291690       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 10:59:06.292036       1 server.go:483] "Version info" version="v1.31.1"
	I0916 10:59:06.292052       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:59:06.294857       1 config.go:199] "Starting service config controller"
	I0916 10:59:06.294894       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 10:59:06.294917       1 config.go:105] "Starting endpoint slice config controller"
	I0916 10:59:06.294921       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 10:59:06.299020       1 config.go:328] "Starting node config controller"
	I0916 10:59:06.299047       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 10:59:06.395685       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 10:59:06.395928       1 shared_informer.go:320] Caches are synced for service config
	I0916 10:59:06.399166       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [d4bb975eec45b54011799a4101f9ba6709586b66878d5c3e3073998608e29857] <==
	I0916 10:56:17.913920       1 serving.go:386] Generated self-signed cert in-memory
	I0916 10:56:19.486557       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:56:19.486821       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:56:19.493410       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 10:56:19.493459       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 10:56:19.493514       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:56:19.493552       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:56:19.493570       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 10:56:19.493580       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:56:19.494208       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:56:19.494420       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:56:19.593680       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:56:19.593683       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 10:56:19.593707       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	E0916 10:57:42.130426       1 framework.go:1305] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-n5dcg\": pod busybox-7dff88458-n5dcg is already assigned to node \"ha-234759-m04\"" plugin="DefaultBinder" pod="default/busybox-7dff88458-n5dcg" node="ha-234759-m04"
	E0916 10:57:42.132308       1 schedule_one.go:348] "scheduler cache ForgetPod failed" err="pod d46d3e7a-cb65-4ad3-a038-da0384151c20(default/busybox-7dff88458-n5dcg) wasn't assumed so cannot be forgotten" pod="default/busybox-7dff88458-n5dcg"
	E0916 10:57:42.132381       1 schedule_one.go:1057] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-7dff88458-n5dcg\": pod busybox-7dff88458-n5dcg is already assigned to node \"ha-234759-m04\"" pod="default/busybox-7dff88458-n5dcg"
	I0916 10:57:42.132403       1 schedule_one.go:1070] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-7dff88458-n5dcg" node="ha-234759-m04"
	
	
	==> kube-scheduler [e9cd978d969ac8990ccc65ebc7e4c9a39de840b663c32b7f129eead0b59424c3] <==
	I0916 10:58:52.401703       1 serving.go:386] Generated self-signed cert in-memory
	I0916 10:58:54.836955       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 10:58:54.836993       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 10:58:54.847173       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 10:58:54.847274       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 10:58:54.847447       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 10:58:54.847507       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 10:58:54.847521       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 10:58:54.847528       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:58:54.848911       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 10:58:54.848988       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 10:58:54.947535       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 10:58:54.947599       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 10:58:54.947812       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 10:59:02 ha-234759 kubelet[674]: E0916 10:59:02.116885     674 kubelet_node_status.go:453] "Error getting the current node from lister" err="node \"ha-234759\" not found"
	Sep 16 10:59:02 ha-234759 kubelet[674]: E0916 10:59:02.217122     674 kubelet_node_status.go:453] "Error getting the current node from lister" err="node \"ha-234759\" not found"
	Sep 16 10:59:02 ha-234759 kubelet[674]: E0916 10:59:02.318078     674 kubelet_node_status.go:453] "Error getting the current node from lister" err="node \"ha-234759\" not found"
	Sep 16 10:59:02 ha-234759 kubelet[674]: E0916 10:59:02.418883     674 kubelet_node_status.go:453] "Error getting the current node from lister" err="node \"ha-234759\" not found"
	Sep 16 10:59:02 ha-234759 kubelet[674]: E0916 10:59:02.519580     674 kubelet_node_status.go:453] "Error getting the current node from lister" err="node \"ha-234759\" not found"
	Sep 16 10:59:02 ha-234759 kubelet[674]: E0916 10:59:02.620136     674 kubelet_node_status.go:453] "Error getting the current node from lister" err="node \"ha-234759\" not found"
	Sep 16 10:59:03 ha-234759 kubelet[674]: I0916 10:59:03.172008     674 apiserver.go:52] "Watching apiserver"
	Sep 16 10:59:03 ha-234759 kubelet[674]: I0916 10:59:03.267269     674 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
	Sep 16 10:59:03 ha-234759 kubelet[674]: I0916 10:59:03.298496     674 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/e8924914-9ba5-4adc-ac46-9d3d97b0bc08-tmp\") pod \"storage-provisioner\" (UID: \"e8924914-9ba5-4adc-ac46-9d3d97b0bc08\") " pod="kube-system/storage-provisioner"
	Sep 16 10:59:03 ha-234759 kubelet[674]: I0916 10:59:03.301372     674 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/ee79653f-8a9e-41e2-82bb-7b08fc87e265-xtables-lock\") pod \"kindnet-q8nl6\" (UID: \"ee79653f-8a9e-41e2-82bb-7b08fc87e265\") " pod="kube-system/kindnet-q8nl6"
	Sep 16 10:59:03 ha-234759 kubelet[674]: I0916 10:59:03.301526     674 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/8ea118a7-cc54-4dd9-8bb2-cfc133a376fc-xtables-lock\") pod \"kube-proxy-gwdl4\" (UID: \"8ea118a7-cc54-4dd9-8bb2-cfc133a376fc\") " pod="kube-system/kube-proxy-gwdl4"
	Sep 16 10:59:03 ha-234759 kubelet[674]: I0916 10:59:03.301620     674 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/8ea118a7-cc54-4dd9-8bb2-cfc133a376fc-lib-modules\") pod \"kube-proxy-gwdl4\" (UID: \"8ea118a7-cc54-4dd9-8bb2-cfc133a376fc\") " pod="kube-system/kube-proxy-gwdl4"
	Sep 16 10:59:03 ha-234759 kubelet[674]: I0916 10:59:03.301725     674 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ee79653f-8a9e-41e2-82bb-7b08fc87e265-lib-modules\") pod \"kindnet-q8nl6\" (UID: \"ee79653f-8a9e-41e2-82bb-7b08fc87e265\") " pod="kube-system/kindnet-q8nl6"
	Sep 16 10:59:03 ha-234759 kubelet[674]: I0916 10:59:03.301841     674 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-cfg\" (UniqueName: \"kubernetes.io/host-path/ee79653f-8a9e-41e2-82bb-7b08fc87e265-cni-cfg\") pod \"kindnet-q8nl6\" (UID: \"ee79653f-8a9e-41e2-82bb-7b08fc87e265\") " pod="kube-system/kindnet-q8nl6"
	Sep 16 10:59:03 ha-234759 kubelet[674]: I0916 10:59:03.351629     674 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 10:59:08 ha-234759 kubelet[674]: E0916 10:59:08.294948     674 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 10:59:08 ha-234759 kubelet[674]: E0916 10:59:08.295003     674 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 10:59:18 ha-234759 kubelet[674]: E0916 10:59:18.323165     674 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 10:59:18 ha-234759 kubelet[674]: E0916 10:59:18.323238     674 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 10:59:28 ha-234759 kubelet[674]: E0916 10:59:28.346145     674 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 10:59:28 ha-234759 kubelet[674]: E0916 10:59:28.346823     674 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 10:59:35 ha-234759 kubelet[674]: I0916 10:59:35.986032     674 scope.go:117] "RemoveContainer" containerID="35d6358938dea6788cef8042038c69dcd63a9de42f599cfc4b7797f2edc50f67"
	Sep 16 10:59:35 ha-234759 kubelet[674]: I0916 10:59:35.986390     674 scope.go:117] "RemoveContainer" containerID="31b1572c29557604197c1865c65736cce94b873b784ad2cbc6e7f5bfe801707c"
	Sep 16 10:59:35 ha-234759 kubelet[674]: E0916 10:59:35.986552     674 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"storage-provisioner\" with CrashLoopBackOff: \"back-off 10s restarting failed container=storage-provisioner pod=storage-provisioner_kube-system(e8924914-9ba5-4adc-ac46-9d3d97b0bc08)\"" pod="kube-system/storage-provisioner" podUID="e8924914-9ba5-4adc-ac46-9d3d97b0bc08"
	Sep 16 10:59:51 ha-234759 kubelet[674]: I0916 10:59:51.190222     674 scope.go:117] "RemoveContainer" containerID="31b1572c29557604197c1865c65736cce94b873b784ad2cbc6e7f5bfe801707c"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p ha-234759 -n ha-234759
helpers_test.go:261: (dbg) Run:  kubectl --context ha-234759 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context ha-234759 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (589.29µs)
helpers_test.go:263: kubectl --context ha-234759 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiControlPlane/serial/RestartCluster (81.26s)

                                                
                                    
x
+
TestMultiNode/serial/MultiNodeLabels (2.63s)

                                                
                                                
=== RUN   TestMultiNode/serial/MultiNodeLabels
multinode_test.go:221: (dbg) Run:  kubectl --context multinode-890146 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]"
multinode_test.go:221: (dbg) Non-zero exit: kubectl --context multinode-890146 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]": fork/exec /usr/local/bin/kubectl: exec format error (1.397695ms)
multinode_test.go:223: failed to 'kubectl get nodes' with args "kubectl --context multinode-890146 get nodes -o \"jsonpath=[{range .items[*]}{.metadata.labels},{end}]\"": fork/exec /usr/local/bin/kubectl: exec format error
multinode_test.go:230: failed to decode json from label list: args "kubectl --context multinode-890146 get nodes -o \"jsonpath=[{range .items[*]}{.metadata.labels},{end}]\"": unexpected end of JSON input
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiNode/serial/MultiNodeLabels]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect multinode-890146
helpers_test.go:235: (dbg) docker inspect multinode-890146:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb",
	        "Created": "2024-09-16T11:07:09.881207881Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2176022,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:07:10.021938387Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/hostname",
	        "HostsPath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/hosts",
	        "LogPath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb-json.log",
	        "Name": "/multinode-890146",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "multinode-890146:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "multinode-890146",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7/merged",
	                "UpperDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7/diff",
	                "WorkDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "multinode-890146",
	                "Source": "/var/lib/docker/volumes/multinode-890146/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "multinode-890146",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "multinode-890146",
	                "name.minikube.sigs.k8s.io": "multinode-890146",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "435c30f14c38000575965d33c99ca27815d0d91f5250deffde5cddcb8e65dca9",
	            "SandboxKey": "/var/run/docker/netns/435c30f14c38",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40717"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40718"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40721"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40719"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40720"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "multinode-890146": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.58.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:3a:02",
	                    "DriverOpts": null,
	                    "NetworkID": "b138f637362d33b7ccebcd9c06d6cdaa35c434cdf582fc761f98e8246e8681cc",
	                    "EndpointID": "1ef64936ec7ebb2090965d084803451acee533d9e380198f704779360ea5dcdb",
	                    "Gateway": "192.168.58.1",
	                    "IPAddress": "192.168.58.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "multinode-890146",
	                        "d045dde36e30"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p multinode-890146 -n multinode-890146
helpers_test.go:244: <<< TestMultiNode/serial/MultiNodeLabels FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/MultiNodeLabels]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p multinode-890146 logs -n 25: (1.537248825s)
helpers_test.go:252: TestMultiNode/serial/MultiNodeLabels logs: 
-- stdout --
	
	==> Audit <==
	|---------|---------------------------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| Command |                       Args                        |       Profile        |  User   | Version |     Start Time      |      End Time       |
	|---------|---------------------------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| ssh     | mount-start-2-972638 ssh -- ls                    | mount-start-2-972638 | jenkins | v1.34.0 | 16 Sep 24 11:06 UTC | 16 Sep 24 11:06 UTC |
	|         | /minikube-host                                    |                      |         |         |                     |                     |
	| delete  | -p mount-start-1-970382                           | mount-start-1-970382 | jenkins | v1.34.0 | 16 Sep 24 11:06 UTC | 16 Sep 24 11:06 UTC |
	|         | --alsologtostderr -v=5                            |                      |         |         |                     |                     |
	| ssh     | mount-start-2-972638 ssh -- ls                    | mount-start-2-972638 | jenkins | v1.34.0 | 16 Sep 24 11:06 UTC | 16 Sep 24 11:06 UTC |
	|         | /minikube-host                                    |                      |         |         |                     |                     |
	| stop    | -p mount-start-2-972638                           | mount-start-2-972638 | jenkins | v1.34.0 | 16 Sep 24 11:06 UTC | 16 Sep 24 11:06 UTC |
	| start   | -p mount-start-2-972638                           | mount-start-2-972638 | jenkins | v1.34.0 | 16 Sep 24 11:06 UTC | 16 Sep 24 11:07 UTC |
	| ssh     | mount-start-2-972638 ssh -- ls                    | mount-start-2-972638 | jenkins | v1.34.0 | 16 Sep 24 11:07 UTC | 16 Sep 24 11:07 UTC |
	|         | /minikube-host                                    |                      |         |         |                     |                     |
	| delete  | -p mount-start-2-972638                           | mount-start-2-972638 | jenkins | v1.34.0 | 16 Sep 24 11:07 UTC | 16 Sep 24 11:07 UTC |
	| delete  | -p mount-start-1-970382                           | mount-start-1-970382 | jenkins | v1.34.0 | 16 Sep 24 11:07 UTC | 16 Sep 24 11:07 UTC |
	| start   | -p multinode-890146                               | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:07 UTC | 16 Sep 24 11:08 UTC |
	|         | --wait=true --memory=2200                         |                      |         |         |                     |                     |
	|         | --nodes=2 -v=8                                    |                      |         |         |                     |                     |
	|         | --alsologtostderr                                 |                      |         |         |                     |                     |
	|         | --driver=docker                                   |                      |         |         |                     |                     |
	|         | --container-runtime=containerd                    |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- apply -f                   | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | ./testdata/multinodes/multinode-pod-dns-test.yaml |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- rollout                    | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | status deployment/busybox                         |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- get pods -o                | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- get pods -o                | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | jsonpath='{.items[*].metadata.name}'              |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- exec                       | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | busybox-7dff88458-hf6zl --                        |                      |         |         |                     |                     |
	|         | nslookup kubernetes.io                            |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- exec                       | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | busybox-7dff88458-wrnfh --                        |                      |         |         |                     |                     |
	|         | nslookup kubernetes.io                            |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- exec                       | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | busybox-7dff88458-hf6zl --                        |                      |         |         |                     |                     |
	|         | nslookup kubernetes.default                       |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- exec                       | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | busybox-7dff88458-wrnfh --                        |                      |         |         |                     |                     |
	|         | nslookup kubernetes.default                       |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- exec                       | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | busybox-7dff88458-hf6zl -- nslookup               |                      |         |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- exec                       | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | busybox-7dff88458-wrnfh -- nslookup               |                      |         |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- get pods -o                | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | jsonpath='{.items[*].metadata.name}'              |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- exec                       | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | busybox-7dff88458-hf6zl                           |                      |         |         |                     |                     |
	|         | -- sh -c nslookup                                 |                      |         |         |                     |                     |
	|         | host.minikube.internal | awk                      |                      |         |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3                           |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- exec                       | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | busybox-7dff88458-hf6zl -- sh                     |                      |         |         |                     |                     |
	|         | -c ping -c 1 192.168.58.1                         |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- exec                       | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | busybox-7dff88458-wrnfh                           |                      |         |         |                     |                     |
	|         | -- sh -c nslookup                                 |                      |         |         |                     |                     |
	|         | host.minikube.internal | awk                      |                      |         |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3                           |                      |         |         |                     |                     |
	| kubectl | -p multinode-890146 -- exec                       | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | busybox-7dff88458-wrnfh -- sh                     |                      |         |         |                     |                     |
	|         | -c ping -c 1 192.168.58.1                         |                      |         |         |                     |                     |
	| node    | add -p multinode-890146 -v 3                      | multinode-890146     | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | --alsologtostderr                                 |                      |         |         |                     |                     |
	|---------|---------------------------------------------------|----------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 11:07:04
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 11:07:04.710961 2175536 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:07:04.711406 2175536 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:07:04.711441 2175536 out.go:358] Setting ErrFile to fd 2...
	I0916 11:07:04.711461 2175536 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:07:04.711736 2175536 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 11:07:04.712191 2175536 out.go:352] Setting JSON to false
	I0916 11:07:04.713227 2175536 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":139767,"bootTime":1726345058,"procs":190,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 11:07:04.713322 2175536 start.go:139] virtualization:  
	I0916 11:07:04.716025 2175536 out.go:177] * [multinode-890146] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:07:04.718481 2175536 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:07:04.718608 2175536 notify.go:220] Checking for updates...
	I0916 11:07:04.722309 2175536 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:07:04.724604 2175536 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:07:04.726247 2175536 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 11:07:04.728060 2175536 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:07:04.730011 2175536 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 11:07:04.732069 2175536 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:07:04.754782 2175536 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:07:04.754909 2175536 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:07:04.815407 2175536 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:41 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 11:07:04.805742665 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:07:04.815518 2175536 docker.go:318] overlay module found
	I0916 11:07:04.817918 2175536 out.go:177] * Using the docker driver based on user configuration
	I0916 11:07:04.819852 2175536 start.go:297] selected driver: docker
	I0916 11:07:04.819879 2175536 start.go:901] validating driver "docker" against <nil>
	I0916 11:07:04.819896 2175536 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:07:04.820612 2175536 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:07:04.882378 2175536 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:41 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 11:07:04.872960095 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:07:04.882594 2175536 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 11:07:04.882857 2175536 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:07:04.884899 2175536 out.go:177] * Using Docker driver with root privileges
	I0916 11:07:04.886500 2175536 cni.go:84] Creating CNI manager for ""
	I0916 11:07:04.886562 2175536 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0916 11:07:04.886576 2175536 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 11:07:04.886657 2175536 start.go:340] cluster config:
	{Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP:
SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:07:04.890071 2175536 out.go:177] * Starting "multinode-890146" primary control-plane node in "multinode-890146" cluster
	I0916 11:07:04.891719 2175536 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 11:07:04.893462 2175536 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:07:04.895451 2175536 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:07:04.895512 2175536 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 11:07:04.895524 2175536 cache.go:56] Caching tarball of preloaded images
	I0916 11:07:04.895532 2175536 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:07:04.895606 2175536 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 11:07:04.895616 2175536 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 11:07:04.895985 2175536 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:07:04.896016 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json: {Name:mk1dc06c1476fc6d0ac1387b52d1026d9e3527d7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	W0916 11:07:04.915293 2175536 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:07:04.915318 2175536 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:07:04.915390 2175536 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:07:04.915415 2175536 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:07:04.915427 2175536 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:07:04.915435 2175536 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:07:04.915440 2175536 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:07:04.916723 2175536 image.go:273] response: 
	I0916 11:07:05.038443 2175536 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:07:05.038485 2175536 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:07:05.038517 2175536 start.go:360] acquireMachinesLock for multinode-890146: {Name:mk50282545d8a591b3d758c5d48e2059a356819d Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:07:05.038643 2175536 start.go:364] duration metric: took 103.442µs to acquireMachinesLock for "multinode-890146"
	I0916 11:07:05.038698 2175536 start.go:93] Provisioning new machine with config: &{Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetri
cs:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 11:07:05.038886 2175536 start.go:125] createHost starting for "" (driver="docker")
	I0916 11:07:05.041773 2175536 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 11:07:05.042034 2175536 start.go:159] libmachine.API.Create for "multinode-890146" (driver="docker")
	I0916 11:07:05.042067 2175536 client.go:168] LocalClient.Create starting
	I0916 11:07:05.042152 2175536 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 11:07:05.042187 2175536 main.go:141] libmachine: Decoding PEM data...
	I0916 11:07:05.042204 2175536 main.go:141] libmachine: Parsing certificate...
	I0916 11:07:05.042263 2175536 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 11:07:05.042288 2175536 main.go:141] libmachine: Decoding PEM data...
	I0916 11:07:05.042303 2175536 main.go:141] libmachine: Parsing certificate...
	I0916 11:07:05.042733 2175536 cli_runner.go:164] Run: docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 11:07:05.058191 2175536 cli_runner.go:211] docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 11:07:05.058277 2175536 network_create.go:284] running [docker network inspect multinode-890146] to gather additional debugging logs...
	I0916 11:07:05.058297 2175536 cli_runner.go:164] Run: docker network inspect multinode-890146
	W0916 11:07:05.074895 2175536 cli_runner.go:211] docker network inspect multinode-890146 returned with exit code 1
	I0916 11:07:05.074951 2175536 network_create.go:287] error running [docker network inspect multinode-890146]: docker network inspect multinode-890146: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network multinode-890146 not found
	I0916 11:07:05.074966 2175536 network_create.go:289] output of [docker network inspect multinode-890146]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network multinode-890146 not found
	
	** /stderr **
	I0916 11:07:05.075080 2175536 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:07:05.091698 2175536 network.go:211] skipping subnet 192.168.49.0/24 that is taken: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName:br-941929ec13d1 IfaceIPv4:192.168.49.1 IfaceMTU:1500 IfaceMAC:02:42:32:84:fe:19} reservation:<nil>}
	I0916 11:07:05.092436 2175536 network.go:206] using free private subnet 192.168.58.0/24: &{IP:192.168.58.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.58.0/24 Gateway:192.168.58.1 ClientMin:192.168.58.2 ClientMax:192.168.58.254 Broadcast:192.168.58.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001874180}
	I0916 11:07:05.092465 2175536 network_create.go:124] attempt to create docker network multinode-890146 192.168.58.0/24 with gateway 192.168.58.1 and MTU of 1500 ...
	I0916 11:07:05.092523 2175536 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.58.0/24 --gateway=192.168.58.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=multinode-890146 multinode-890146
	I0916 11:07:05.169697 2175536 network_create.go:108] docker network multinode-890146 192.168.58.0/24 created
	I0916 11:07:05.169733 2175536 kic.go:121] calculated static IP "192.168.58.2" for the "multinode-890146" container
	I0916 11:07:05.169807 2175536 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:07:05.187546 2175536 cli_runner.go:164] Run: docker volume create multinode-890146 --label name.minikube.sigs.k8s.io=multinode-890146 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:07:05.204604 2175536 oci.go:103] Successfully created a docker volume multinode-890146
	I0916 11:07:05.204713 2175536 cli_runner.go:164] Run: docker run --rm --name multinode-890146-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-890146 --entrypoint /usr/bin/test -v multinode-890146:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 11:07:05.765543 2175536 oci.go:107] Successfully prepared a docker volume multinode-890146
	I0916 11:07:05.765593 2175536 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:07:05.765614 2175536 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 11:07:05.765685 2175536 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v multinode-890146:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 11:07:09.815871 2175536 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v multinode-890146:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.050143051s)
	I0916 11:07:09.815905 2175536 kic.go:203] duration metric: took 4.050287182s to extract preloaded images to volume ...
	W0916 11:07:09.816058 2175536 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 11:07:09.816175 2175536 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 11:07:09.867064 2175536 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname multinode-890146 --name multinode-890146 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-890146 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=multinode-890146 --network multinode-890146 --ip 192.168.58.2 --volume multinode-890146:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 11:07:10.200352 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Running}}
	I0916 11:07:10.227902 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:07:10.250171 2175536 cli_runner.go:164] Run: docker exec multinode-890146 stat /var/lib/dpkg/alternatives/iptables
	I0916 11:07:10.322219 2175536 oci.go:144] the created container "multinode-890146" has a running status.
	I0916 11:07:10.322248 2175536 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa...
	I0916 11:07:11.953902 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 11:07:11.953955 2175536 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 11:07:11.973438 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:07:11.989923 2175536 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 11:07:11.989951 2175536 kic_runner.go:114] Args: [docker exec --privileged multinode-890146 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 11:07:12.043688 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:07:12.063046 2175536 machine.go:93] provisionDockerMachine start ...
	I0916 11:07:12.063151 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:12.081954 2175536 main.go:141] libmachine: Using SSH client type: native
	I0916 11:07:12.082271 2175536 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40717 <nil> <nil>}
	I0916 11:07:12.082287 2175536 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:07:12.222205 2175536 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146
	
	I0916 11:07:12.222232 2175536 ubuntu.go:169] provisioning hostname "multinode-890146"
	I0916 11:07:12.222304 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:12.238921 2175536 main.go:141] libmachine: Using SSH client type: native
	I0916 11:07:12.239170 2175536 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40717 <nil> <nil>}
	I0916 11:07:12.239187 2175536 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-890146 && echo "multinode-890146" | sudo tee /etc/hostname
	I0916 11:07:12.386518 2175536 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146
	
	I0916 11:07:12.386619 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:12.404224 2175536 main.go:141] libmachine: Using SSH client type: native
	I0916 11:07:12.404534 2175536 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40717 <nil> <nil>}
	I0916 11:07:12.404566 2175536 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-890146' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-890146/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-890146' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:07:12.551490 2175536 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:07:12.551519 2175536 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 11:07:12.551548 2175536 ubuntu.go:177] setting up certificates
	I0916 11:07:12.551557 2175536 provision.go:84] configureAuth start
	I0916 11:07:12.551617 2175536 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146
	I0916 11:07:12.568710 2175536 provision.go:143] copyHostCerts
	I0916 11:07:12.568765 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:07:12.568800 2175536 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 11:07:12.568808 2175536 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:07:12.568889 2175536 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 11:07:12.568965 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:07:12.568981 2175536 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 11:07:12.568985 2175536 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:07:12.569022 2175536 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 11:07:12.569060 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:07:12.569077 2175536 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 11:07:12.569081 2175536 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:07:12.569108 2175536 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 11:07:12.569152 2175536 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.multinode-890146 san=[127.0.0.1 192.168.58.2 localhost minikube multinode-890146]
	I0916 11:07:13.054906 2175536 provision.go:177] copyRemoteCerts
	I0916 11:07:13.054975 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:07:13.055018 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:13.074045 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:13.171530 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:07:13.171589 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 11:07:13.195905 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:07:13.195969 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1216 bytes)
	I0916 11:07:13.219875 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:07:13.219955 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 11:07:13.244593 2175536 provision.go:87] duration metric: took 693.022182ms to configureAuth
	I0916 11:07:13.244627 2175536 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:07:13.244830 2175536 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:07:13.244844 2175536 machine.go:96] duration metric: took 1.181777353s to provisionDockerMachine
	I0916 11:07:13.244852 2175536 client.go:171] duration metric: took 8.202778601s to LocalClient.Create
	I0916 11:07:13.244865 2175536 start.go:167] duration metric: took 8.20283387s to libmachine.API.Create "multinode-890146"
	I0916 11:07:13.244877 2175536 start.go:293] postStartSetup for "multinode-890146" (driver="docker")
	I0916 11:07:13.244887 2175536 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:07:13.244944 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:07:13.244989 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:13.262952 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:13.359972 2175536 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:07:13.363266 2175536 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:07:13.363334 2175536 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:07:13.363347 2175536 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:07:13.363353 2175536 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:07:13.363359 2175536 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:07:13.363363 2175536 command_runner.go:130] > ID=ubuntu
	I0916 11:07:13.363367 2175536 command_runner.go:130] > ID_LIKE=debian
	I0916 11:07:13.363371 2175536 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:07:13.363376 2175536 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:07:13.363384 2175536 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:07:13.363394 2175536 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:07:13.363405 2175536 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:07:13.363458 2175536 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:07:13.363499 2175536 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:07:13.363514 2175536 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:07:13.363522 2175536 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:07:13.363536 2175536 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 11:07:13.363599 2175536 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 11:07:13.363680 2175536 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 11:07:13.363692 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 11:07:13.363806 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:07:13.372570 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:07:13.397437 2175536 start.go:296] duration metric: took 152.543836ms for postStartSetup
	I0916 11:07:13.397802 2175536 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146
	I0916 11:07:13.414034 2175536 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:07:13.414328 2175536 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:07:13.414377 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:13.431292 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:13.523771 2175536 command_runner.go:130] > 21%
	I0916 11:07:13.523863 2175536 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:07:13.528222 2175536 command_runner.go:130] > 154G
	I0916 11:07:13.528717 2175536 start.go:128] duration metric: took 8.489816181s to createHost
	I0916 11:07:13.528744 2175536 start.go:83] releasing machines lock for "multinode-890146", held for 8.490079023s
	I0916 11:07:13.528823 2175536 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146
	I0916 11:07:13.546503 2175536 ssh_runner.go:195] Run: cat /version.json
	I0916 11:07:13.546565 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:13.546570 2175536 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:07:13.546651 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:13.566271 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:13.580236 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:13.789090 2175536 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:07:13.789167 2175536 command_runner.go:130] > {"iso_version": "v1.34.0-1726281733-19643", "kicbase_version": "v0.0.45-1726358845-19644", "minikube_version": "v1.34.0", "commit": "f890713149c79cf50e25c13e6a5c0470aa0f0450"}
	I0916 11:07:13.789364 2175536 ssh_runner.go:195] Run: systemctl --version
	I0916 11:07:13.793341 2175536 command_runner.go:130] > systemd 249 (249.11-0ubuntu3.12)
	I0916 11:07:13.793376 2175536 command_runner.go:130] > +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified
	I0916 11:07:13.793760 2175536 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:07:13.797426 2175536 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 11:07:13.797449 2175536 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:07:13.797464 2175536 command_runner.go:130] > Device: 3ch/60d	Inode: 1301117     Links: 1
	I0916 11:07:13.797471 2175536 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:07:13.797478 2175536 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:07:13.797483 2175536 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:07:13.797488 2175536 command_runner.go:130] > Change: 2024-09-16 10:29:56.970000846 +0000
	I0916 11:07:13.797494 2175536 command_runner.go:130] >  Birth: 2024-09-16 10:29:56.970000846 +0000
	I0916 11:07:13.797773 2175536 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 11:07:13.822367 2175536 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:07:13.822480 2175536 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:07:13.850861 2175536 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf, 
	I0916 11:07:13.850919 2175536 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 11:07:13.850928 2175536 start.go:495] detecting cgroup driver to use...
	I0916 11:07:13.850973 2175536 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:07:13.851029 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 11:07:13.864220 2175536 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 11:07:13.875991 2175536 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:07:13.876079 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:07:13.890034 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:07:13.905601 2175536 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:07:13.997977 2175536 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:07:14.107738 2175536 command_runner.go:130] ! Created symlink /etc/systemd/system/cri-docker.service → /dev/null.
	I0916 11:07:14.107771 2175536 docker.go:233] disabling docker service ...
	I0916 11:07:14.107824 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:07:14.130375 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:07:14.142958 2175536 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:07:14.233252 2175536 command_runner.go:130] ! Removed /etc/systemd/system/sockets.target.wants/docker.socket.
	I0916 11:07:14.233395 2175536 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:07:14.338360 2175536 command_runner.go:130] ! Created symlink /etc/systemd/system/docker.service → /dev/null.
	I0916 11:07:14.338546 2175536 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:07:14.351624 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:07:14.367588 2175536 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0916 11:07:14.368947 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 11:07:14.379802 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 11:07:14.389657 2175536 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 11:07:14.389778 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 11:07:14.399737 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:07:14.409839 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 11:07:14.419858 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:07:14.429710 2175536 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:07:14.439343 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 11:07:14.449034 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 11:07:14.458507 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 11:07:14.468646 2175536 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:07:14.476639 2175536 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:07:14.477555 2175536 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:07:14.486240 2175536 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:07:14.573778 2175536 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 11:07:14.706938 2175536 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 11:07:14.707066 2175536 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 11:07:14.710670 2175536 command_runner.go:130] >   File: /run/containerd/containerd.sock
	I0916 11:07:14.710780 2175536 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:07:14.710802 2175536 command_runner.go:130] > Device: 45h/69d	Inode: 175         Links: 1
	I0916 11:07:14.710839 2175536 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:07:14.710863 2175536 command_runner.go:130] > Access: 2024-09-16 11:07:14.656361543 +0000
	I0916 11:07:14.710882 2175536 command_runner.go:130] > Modify: 2024-09-16 11:07:14.656361543 +0000
	I0916 11:07:14.710905 2175536 command_runner.go:130] > Change: 2024-09-16 11:07:14.656361543 +0000
	I0916 11:07:14.710931 2175536 command_runner.go:130] >  Birth: -
	I0916 11:07:14.710991 2175536 start.go:563] Will wait 60s for crictl version
	I0916 11:07:14.711072 2175536 ssh_runner.go:195] Run: which crictl
	I0916 11:07:14.714603 2175536 command_runner.go:130] > /usr/bin/crictl
	I0916 11:07:14.714721 2175536 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:07:14.752930 2175536 command_runner.go:130] > Version:  0.1.0
	I0916 11:07:14.752950 2175536 command_runner.go:130] > RuntimeName:  containerd
	I0916 11:07:14.752972 2175536 command_runner.go:130] > RuntimeVersion:  1.7.22
	I0916 11:07:14.752977 2175536 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:07:14.755746 2175536 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 11:07:14.755815 2175536 ssh_runner.go:195] Run: containerd --version
	I0916 11:07:14.775668 2175536 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:07:14.777251 2175536 ssh_runner.go:195] Run: containerd --version
	I0916 11:07:14.797312 2175536 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:07:14.801130 2175536 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 11:07:14.802981 2175536 cli_runner.go:164] Run: docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:07:14.818100 2175536 ssh_runner.go:195] Run: grep 192.168.58.1	host.minikube.internal$ /etc/hosts
	I0916 11:07:14.821749 2175536 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.58.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:07:14.832523 2175536 kubeadm.go:883] updating cluster {Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APISe
rverNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:fals
e CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:07:14.832641 2175536 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:07:14.832706 2175536 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:07:14.865240 2175536 command_runner.go:130] > {
	I0916 11:07:14.865259 2175536 command_runner.go:130] >   "images": [
	I0916 11:07:14.865263 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865273 2175536 command_runner.go:130] >       "id": "sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:07:14.865278 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865286 2175536 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:07:14.865290 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865294 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865303 2175536 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:07:14.865306 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865310 2175536 command_runner.go:130] >       "size": "33309097",
	I0916 11:07:14.865314 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.865317 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865321 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865324 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865332 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865335 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865345 2175536 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:07:14.865349 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865354 2175536 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:07:14.865357 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865361 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865369 2175536 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:07:14.865372 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865376 2175536 command_runner.go:130] >       "size": "8034419",
	I0916 11:07:14.865379 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.865383 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865386 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865390 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865393 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865395 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865402 2175536 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:07:14.865406 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865413 2175536 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:07:14.865417 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865420 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865428 2175536 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:07:14.865431 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865435 2175536 command_runner.go:130] >       "size": "16948420",
	I0916 11:07:14.865438 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.865442 2175536 command_runner.go:130] >       "username": "nonroot",
	I0916 11:07:14.865446 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865450 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865453 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865456 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865462 2175536 command_runner.go:130] >       "id": "sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:07:14.865466 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865471 2175536 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:07:14.865478 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865482 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865490 2175536 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a"
	I0916 11:07:14.865493 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865496 2175536 command_runner.go:130] >       "size": "66535646",
	I0916 11:07:14.865500 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.865503 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.865506 2175536 command_runner.go:130] >       },
	I0916 11:07:14.865510 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865513 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865517 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865519 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865522 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865529 2175536 command_runner.go:130] >       "id": "sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:07:14.865532 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865537 2175536 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:07:14.865540 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865544 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865556 2175536 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb"
	I0916 11:07:14.865559 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865562 2175536 command_runner.go:130] >       "size": "25687130",
	I0916 11:07:14.865566 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.865569 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.865572 2175536 command_runner.go:130] >       },
	I0916 11:07:14.865576 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865579 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865583 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865585 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865588 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865595 2175536 command_runner.go:130] >       "id": "sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:07:14.865598 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865603 2175536 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:07:14.865606 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865616 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865624 2175536 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1"
	I0916 11:07:14.865628 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865631 2175536 command_runner.go:130] >       "size": "23948670",
	I0916 11:07:14.865635 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.865638 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.865641 2175536 command_runner.go:130] >       },
	I0916 11:07:14.865644 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865650 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865654 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865656 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865659 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865666 2175536 command_runner.go:130] >       "id": "sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:07:14.865669 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865674 2175536 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:07:14.865679 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865682 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865690 2175536 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44"
	I0916 11:07:14.865693 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865697 2175536 command_runner.go:130] >       "size": "26756812",
	I0916 11:07:14.865700 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.865703 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865707 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865710 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865713 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865716 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865722 2175536 command_runner.go:130] >       "id": "sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:07:14.865726 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865731 2175536 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:07:14.865734 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865737 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865745 2175536 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:07:14.865749 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865756 2175536 command_runner.go:130] >       "size": "18507674",
	I0916 11:07:14.865759 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.865763 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.865766 2175536 command_runner.go:130] >       },
	I0916 11:07:14.865769 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865772 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865776 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865779 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865782 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865788 2175536 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:07:14.865792 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865796 2175536 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:07:14.865799 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865802 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865810 2175536 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:07:14.865813 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865816 2175536 command_runner.go:130] >       "size": "267933",
	I0916 11:07:14.865820 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.865824 2175536 command_runner.go:130] >         "value": "65535"
	I0916 11:07:14.865827 2175536 command_runner.go:130] >       },
	I0916 11:07:14.865830 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865834 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865837 2175536 command_runner.go:130] >       "pinned": true
	I0916 11:07:14.865840 2175536 command_runner.go:130] >     }
	I0916 11:07:14.865843 2175536 command_runner.go:130] >   ]
	I0916 11:07:14.865846 2175536 command_runner.go:130] > }
	I0916 11:07:14.868316 2175536 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 11:07:14.868340 2175536 containerd.go:534] Images already preloaded, skipping extraction
	I0916 11:07:14.868401 2175536 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:07:14.905234 2175536 command_runner.go:130] > {
	I0916 11:07:14.905255 2175536 command_runner.go:130] >   "images": [
	I0916 11:07:14.905260 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905272 2175536 command_runner.go:130] >       "id": "sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:07:14.905295 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905301 2175536 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:07:14.905304 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905308 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905317 2175536 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:07:14.905321 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905325 2175536 command_runner.go:130] >       "size": "33309097",
	I0916 11:07:14.905329 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.905333 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905337 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905341 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905344 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905348 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905356 2175536 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:07:14.905360 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905365 2175536 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:07:14.905368 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905372 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905381 2175536 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:07:14.905384 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905390 2175536 command_runner.go:130] >       "size": "8034419",
	I0916 11:07:14.905394 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.905399 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905402 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905406 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905412 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905415 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905428 2175536 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:07:14.905467 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905474 2175536 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:07:14.905477 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905483 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905497 2175536 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:07:14.905508 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905512 2175536 command_runner.go:130] >       "size": "16948420",
	I0916 11:07:14.905515 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.905520 2175536 command_runner.go:130] >       "username": "nonroot",
	I0916 11:07:14.905523 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905529 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905532 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905535 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905541 2175536 command_runner.go:130] >       "id": "sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:07:14.905545 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905549 2175536 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:07:14.905554 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905558 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905565 2175536 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a"
	I0916 11:07:14.905568 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905572 2175536 command_runner.go:130] >       "size": "66535646",
	I0916 11:07:14.905575 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.905578 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.905581 2175536 command_runner.go:130] >       },
	I0916 11:07:14.905585 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905588 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905592 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905595 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905598 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905604 2175536 command_runner.go:130] >       "id": "sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:07:14.905608 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905613 2175536 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:07:14.905616 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905620 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905633 2175536 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb"
	I0916 11:07:14.905638 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905642 2175536 command_runner.go:130] >       "size": "25687130",
	I0916 11:07:14.905645 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.905652 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.905655 2175536 command_runner.go:130] >       },
	I0916 11:07:14.905658 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905662 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905665 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905668 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905671 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905677 2175536 command_runner.go:130] >       "id": "sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:07:14.905681 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905686 2175536 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:07:14.905689 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905693 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905701 2175536 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1"
	I0916 11:07:14.905704 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905708 2175536 command_runner.go:130] >       "size": "23948670",
	I0916 11:07:14.905711 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.905714 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.905717 2175536 command_runner.go:130] >       },
	I0916 11:07:14.905721 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905726 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905730 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905732 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905735 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905742 2175536 command_runner.go:130] >       "id": "sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:07:14.905745 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905750 2175536 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:07:14.905753 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905757 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905764 2175536 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44"
	I0916 11:07:14.905767 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905771 2175536 command_runner.go:130] >       "size": "26756812",
	I0916 11:07:14.905774 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.905778 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905784 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905788 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905791 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905794 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905800 2175536 command_runner.go:130] >       "id": "sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:07:14.905804 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905809 2175536 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:07:14.905812 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905815 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905823 2175536 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:07:14.905826 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905830 2175536 command_runner.go:130] >       "size": "18507674",
	I0916 11:07:14.905833 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.905836 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.905839 2175536 command_runner.go:130] >       },
	I0916 11:07:14.905843 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905846 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905850 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905853 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905856 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905862 2175536 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:07:14.905866 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905870 2175536 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:07:14.905873 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905877 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905884 2175536 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:07:14.905887 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905892 2175536 command_runner.go:130] >       "size": "267933",
	I0916 11:07:14.905895 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.905899 2175536 command_runner.go:130] >         "value": "65535"
	I0916 11:07:14.905903 2175536 command_runner.go:130] >       },
	I0916 11:07:14.905907 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905910 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905917 2175536 command_runner.go:130] >       "pinned": true
	I0916 11:07:14.905920 2175536 command_runner.go:130] >     }
	I0916 11:07:14.905923 2175536 command_runner.go:130] >   ]
	I0916 11:07:14.905927 2175536 command_runner.go:130] > }
	I0916 11:07:14.908255 2175536 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 11:07:14.908275 2175536 cache_images.go:84] Images are preloaded, skipping loading
	I0916 11:07:14.908283 2175536 kubeadm.go:934] updating node { 192.168.58.2 8443 v1.31.1 containerd true true} ...
	I0916 11:07:14.908389 2175536 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-890146 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.58.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:07:14.908461 2175536 ssh_runner.go:195] Run: sudo crictl info
	I0916 11:07:14.944258 2175536 command_runner.go:130] > {
	I0916 11:07:14.944281 2175536 command_runner.go:130] >   "status": {
	I0916 11:07:14.944287 2175536 command_runner.go:130] >     "conditions": [
	I0916 11:07:14.944291 2175536 command_runner.go:130] >       {
	I0916 11:07:14.944296 2175536 command_runner.go:130] >         "type": "RuntimeReady",
	I0916 11:07:14.944300 2175536 command_runner.go:130] >         "status": true,
	I0916 11:07:14.944304 2175536 command_runner.go:130] >         "reason": "",
	I0916 11:07:14.944309 2175536 command_runner.go:130] >         "message": ""
	I0916 11:07:14.944315 2175536 command_runner.go:130] >       },
	I0916 11:07:14.944318 2175536 command_runner.go:130] >       {
	I0916 11:07:14.944323 2175536 command_runner.go:130] >         "type": "NetworkReady",
	I0916 11:07:14.944326 2175536 command_runner.go:130] >         "status": true,
	I0916 11:07:14.944330 2175536 command_runner.go:130] >         "reason": "",
	I0916 11:07:14.944335 2175536 command_runner.go:130] >         "message": ""
	I0916 11:07:14.944343 2175536 command_runner.go:130] >       },
	I0916 11:07:14.944347 2175536 command_runner.go:130] >       {
	I0916 11:07:14.944352 2175536 command_runner.go:130] >         "type": "ContainerdHasNoDeprecationWarnings",
	I0916 11:07:14.944356 2175536 command_runner.go:130] >         "status": true,
	I0916 11:07:14.944360 2175536 command_runner.go:130] >         "reason": "",
	I0916 11:07:14.944366 2175536 command_runner.go:130] >         "message": ""
	I0916 11:07:14.944369 2175536 command_runner.go:130] >       }
	I0916 11:07:14.944372 2175536 command_runner.go:130] >     ]
	I0916 11:07:14.944375 2175536 command_runner.go:130] >   },
	I0916 11:07:14.944379 2175536 command_runner.go:130] >   "cniconfig": {
	I0916 11:07:14.944384 2175536 command_runner.go:130] >     "PluginDirs": [
	I0916 11:07:14.944388 2175536 command_runner.go:130] >       "/opt/cni/bin"
	I0916 11:07:14.944396 2175536 command_runner.go:130] >     ],
	I0916 11:07:14.944404 2175536 command_runner.go:130] >     "PluginConfDir": "/etc/cni/net.d",
	I0916 11:07:14.944411 2175536 command_runner.go:130] >     "PluginMaxConfNum": 1,
	I0916 11:07:14.944414 2175536 command_runner.go:130] >     "Prefix": "eth",
	I0916 11:07:14.944418 2175536 command_runner.go:130] >     "Networks": [
	I0916 11:07:14.944421 2175536 command_runner.go:130] >       {
	I0916 11:07:14.944424 2175536 command_runner.go:130] >         "Config": {
	I0916 11:07:14.944428 2175536 command_runner.go:130] >           "Name": "cni-loopback",
	I0916 11:07:14.944432 2175536 command_runner.go:130] >           "CNIVersion": "0.3.1",
	I0916 11:07:14.944436 2175536 command_runner.go:130] >           "Plugins": [
	I0916 11:07:14.944439 2175536 command_runner.go:130] >             {
	I0916 11:07:14.944443 2175536 command_runner.go:130] >               "Network": {
	I0916 11:07:14.944447 2175536 command_runner.go:130] >                 "type": "loopback",
	I0916 11:07:14.944453 2175536 command_runner.go:130] >                 "ipam": {},
	I0916 11:07:14.944462 2175536 command_runner.go:130] >                 "dns": {}
	I0916 11:07:14.944466 2175536 command_runner.go:130] >               },
	I0916 11:07:14.944471 2175536 command_runner.go:130] >               "Source": "{\"type\":\"loopback\"}"
	I0916 11:07:14.944482 2175536 command_runner.go:130] >             }
	I0916 11:07:14.944485 2175536 command_runner.go:130] >           ],
	I0916 11:07:14.944498 2175536 command_runner.go:130] >           "Source": "{\n\"cniVersion\": \"0.3.1\",\n\"name\": \"cni-loopback\",\n\"plugins\": [{\n  \"type\": \"loopback\"\n}]\n}"
	I0916 11:07:14.944504 2175536 command_runner.go:130] >         },
	I0916 11:07:14.944508 2175536 command_runner.go:130] >         "IFName": "lo"
	I0916 11:07:14.944511 2175536 command_runner.go:130] >       },
	I0916 11:07:14.944514 2175536 command_runner.go:130] >       {
	I0916 11:07:14.944517 2175536 command_runner.go:130] >         "Config": {
	I0916 11:07:14.944522 2175536 command_runner.go:130] >           "Name": "loopback",
	I0916 11:07:14.944526 2175536 command_runner.go:130] >           "CNIVersion": "1.0.0",
	I0916 11:07:14.944533 2175536 command_runner.go:130] >           "Plugins": [
	I0916 11:07:14.944537 2175536 command_runner.go:130] >             {
	I0916 11:07:14.944541 2175536 command_runner.go:130] >               "Network": {
	I0916 11:07:14.944547 2175536 command_runner.go:130] >                 "cniVersion": "1.0.0",
	I0916 11:07:14.944552 2175536 command_runner.go:130] >                 "name": "loopback",
	I0916 11:07:14.944556 2175536 command_runner.go:130] >                 "type": "loopback",
	I0916 11:07:14.944572 2175536 command_runner.go:130] >                 "ipam": {},
	I0916 11:07:14.944578 2175536 command_runner.go:130] >                 "dns": {}
	I0916 11:07:14.944583 2175536 command_runner.go:130] >               },
	I0916 11:07:14.944591 2175536 command_runner.go:130] >               "Source": "{\"cniVersion\":\"1.0.0\",\"name\":\"loopback\",\"type\":\"loopback\"}"
	I0916 11:07:14.944595 2175536 command_runner.go:130] >             }
	I0916 11:07:14.944598 2175536 command_runner.go:130] >           ],
	I0916 11:07:14.944608 2175536 command_runner.go:130] >           "Source": "{\"cniVersion\":\"1.0.0\",\"name\":\"loopback\",\"plugins\":[{\"cniVersion\":\"1.0.0\",\"name\":\"loopback\",\"type\":\"loopback\"}]}"
	I0916 11:07:14.944612 2175536 command_runner.go:130] >         },
	I0916 11:07:14.944616 2175536 command_runner.go:130] >         "IFName": "eth0"
	I0916 11:07:14.944622 2175536 command_runner.go:130] >       }
	I0916 11:07:14.944625 2175536 command_runner.go:130] >     ]
	I0916 11:07:14.944628 2175536 command_runner.go:130] >   },
	I0916 11:07:14.944631 2175536 command_runner.go:130] >   "config": {
	I0916 11:07:14.944635 2175536 command_runner.go:130] >     "containerd": {
	I0916 11:07:14.944642 2175536 command_runner.go:130] >       "snapshotter": "overlayfs",
	I0916 11:07:14.944647 2175536 command_runner.go:130] >       "defaultRuntimeName": "runc",
	I0916 11:07:14.944659 2175536 command_runner.go:130] >       "defaultRuntime": {
	I0916 11:07:14.944663 2175536 command_runner.go:130] >         "runtimeType": "",
	I0916 11:07:14.944668 2175536 command_runner.go:130] >         "runtimePath": "",
	I0916 11:07:14.944673 2175536 command_runner.go:130] >         "runtimeEngine": "",
	I0916 11:07:14.944677 2175536 command_runner.go:130] >         "PodAnnotations": null,
	I0916 11:07:14.944687 2175536 command_runner.go:130] >         "ContainerAnnotations": null,
	I0916 11:07:14.944697 2175536 command_runner.go:130] >         "runtimeRoot": "",
	I0916 11:07:14.944701 2175536 command_runner.go:130] >         "options": null,
	I0916 11:07:14.944706 2175536 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0916 11:07:14.944715 2175536 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0916 11:07:14.944723 2175536 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0916 11:07:14.944727 2175536 command_runner.go:130] >         "cniConfDir": "",
	I0916 11:07:14.944732 2175536 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0916 11:07:14.944739 2175536 command_runner.go:130] >         "snapshotter": "",
	I0916 11:07:14.944743 2175536 command_runner.go:130] >         "sandboxMode": ""
	I0916 11:07:14.944746 2175536 command_runner.go:130] >       },
	I0916 11:07:14.944756 2175536 command_runner.go:130] >       "untrustedWorkloadRuntime": {
	I0916 11:07:14.944765 2175536 command_runner.go:130] >         "runtimeType": "",
	I0916 11:07:14.944771 2175536 command_runner.go:130] >         "runtimePath": "",
	I0916 11:07:14.944776 2175536 command_runner.go:130] >         "runtimeEngine": "",
	I0916 11:07:14.944780 2175536 command_runner.go:130] >         "PodAnnotations": null,
	I0916 11:07:14.944784 2175536 command_runner.go:130] >         "ContainerAnnotations": null,
	I0916 11:07:14.944788 2175536 command_runner.go:130] >         "runtimeRoot": "",
	I0916 11:07:14.944792 2175536 command_runner.go:130] >         "options": null,
	I0916 11:07:14.944797 2175536 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0916 11:07:14.944803 2175536 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0916 11:07:14.944810 2175536 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0916 11:07:14.944814 2175536 command_runner.go:130] >         "cniConfDir": "",
	I0916 11:07:14.944818 2175536 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0916 11:07:14.944824 2175536 command_runner.go:130] >         "snapshotter": "",
	I0916 11:07:14.944830 2175536 command_runner.go:130] >         "sandboxMode": ""
	I0916 11:07:14.944833 2175536 command_runner.go:130] >       },
	I0916 11:07:14.944841 2175536 command_runner.go:130] >       "runtimes": {
	I0916 11:07:14.944847 2175536 command_runner.go:130] >         "runc": {
	I0916 11:07:14.944851 2175536 command_runner.go:130] >           "runtimeType": "io.containerd.runc.v2",
	I0916 11:07:14.944858 2175536 command_runner.go:130] >           "runtimePath": "",
	I0916 11:07:14.944864 2175536 command_runner.go:130] >           "runtimeEngine": "",
	I0916 11:07:14.944868 2175536 command_runner.go:130] >           "PodAnnotations": null,
	I0916 11:07:14.944873 2175536 command_runner.go:130] >           "ContainerAnnotations": null,
	I0916 11:07:14.944877 2175536 command_runner.go:130] >           "runtimeRoot": "",
	I0916 11:07:14.944881 2175536 command_runner.go:130] >           "options": {
	I0916 11:07:14.944885 2175536 command_runner.go:130] >             "SystemdCgroup": false
	I0916 11:07:14.944891 2175536 command_runner.go:130] >           },
	I0916 11:07:14.944901 2175536 command_runner.go:130] >           "privileged_without_host_devices": false,
	I0916 11:07:14.944909 2175536 command_runner.go:130] >           "privileged_without_host_devices_all_devices_allowed": false,
	I0916 11:07:14.944913 2175536 command_runner.go:130] >           "baseRuntimeSpec": "",
	I0916 11:07:14.944917 2175536 command_runner.go:130] >           "cniConfDir": "",
	I0916 11:07:14.944923 2175536 command_runner.go:130] >           "cniMaxConfNum": 0,
	I0916 11:07:14.944927 2175536 command_runner.go:130] >           "snapshotter": "",
	I0916 11:07:14.944935 2175536 command_runner.go:130] >           "sandboxMode": "podsandbox"
	I0916 11:07:14.944947 2175536 command_runner.go:130] >         }
	I0916 11:07:14.944953 2175536 command_runner.go:130] >       },
	I0916 11:07:14.944957 2175536 command_runner.go:130] >       "noPivot": false,
	I0916 11:07:14.944963 2175536 command_runner.go:130] >       "disableSnapshotAnnotations": true,
	I0916 11:07:14.944968 2175536 command_runner.go:130] >       "discardUnpackedLayers": true,
	I0916 11:07:14.944977 2175536 command_runner.go:130] >       "ignoreBlockIONotEnabledErrors": false,
	I0916 11:07:14.944985 2175536 command_runner.go:130] >       "ignoreRdtNotEnabledErrors": false
	I0916 11:07:14.944987 2175536 command_runner.go:130] >     },
	I0916 11:07:14.944993 2175536 command_runner.go:130] >     "cni": {
	I0916 11:07:14.944997 2175536 command_runner.go:130] >       "binDir": "/opt/cni/bin",
	I0916 11:07:14.945002 2175536 command_runner.go:130] >       "confDir": "/etc/cni/net.d",
	I0916 11:07:14.945006 2175536 command_runner.go:130] >       "maxConfNum": 1,
	I0916 11:07:14.945013 2175536 command_runner.go:130] >       "setupSerially": false,
	I0916 11:07:14.945022 2175536 command_runner.go:130] >       "confTemplate": "",
	I0916 11:07:14.945026 2175536 command_runner.go:130] >       "ipPref": ""
	I0916 11:07:14.945032 2175536 command_runner.go:130] >     },
	I0916 11:07:14.945035 2175536 command_runner.go:130] >     "registry": {
	I0916 11:07:14.945045 2175536 command_runner.go:130] >       "configPath": "/etc/containerd/certs.d",
	I0916 11:07:14.945051 2175536 command_runner.go:130] >       "mirrors": null,
	I0916 11:07:14.945055 2175536 command_runner.go:130] >       "configs": null,
	I0916 11:07:14.945061 2175536 command_runner.go:130] >       "auths": null,
	I0916 11:07:14.945064 2175536 command_runner.go:130] >       "headers": null
	I0916 11:07:14.945070 2175536 command_runner.go:130] >     },
	I0916 11:07:14.945074 2175536 command_runner.go:130] >     "imageDecryption": {
	I0916 11:07:14.945090 2175536 command_runner.go:130] >       "keyModel": "node"
	I0916 11:07:14.945110 2175536 command_runner.go:130] >     },
	I0916 11:07:14.945117 2175536 command_runner.go:130] >     "disableTCPService": true,
	I0916 11:07:14.945122 2175536 command_runner.go:130] >     "streamServerAddress": "",
	I0916 11:07:14.945128 2175536 command_runner.go:130] >     "streamServerPort": "10010",
	I0916 11:07:14.945133 2175536 command_runner.go:130] >     "streamIdleTimeout": "4h0m0s",
	I0916 11:07:14.945137 2175536 command_runner.go:130] >     "enableSelinux": false,
	I0916 11:07:14.945141 2175536 command_runner.go:130] >     "selinuxCategoryRange": 1024,
	I0916 11:07:14.945148 2175536 command_runner.go:130] >     "sandboxImage": "registry.k8s.io/pause:3.10",
	I0916 11:07:14.945164 2175536 command_runner.go:130] >     "statsCollectPeriod": 10,
	I0916 11:07:14.945169 2175536 command_runner.go:130] >     "systemdCgroup": false,
	I0916 11:07:14.945173 2175536 command_runner.go:130] >     "enableTLSStreaming": false,
	I0916 11:07:14.945177 2175536 command_runner.go:130] >     "x509KeyPairStreaming": {
	I0916 11:07:14.945188 2175536 command_runner.go:130] >       "tlsCertFile": "",
	I0916 11:07:14.945192 2175536 command_runner.go:130] >       "tlsKeyFile": ""
	I0916 11:07:14.945194 2175536 command_runner.go:130] >     },
	I0916 11:07:14.945199 2175536 command_runner.go:130] >     "maxContainerLogSize": 16384,
	I0916 11:07:14.945202 2175536 command_runner.go:130] >     "disableCgroup": false,
	I0916 11:07:14.945206 2175536 command_runner.go:130] >     "disableApparmor": false,
	I0916 11:07:14.945210 2175536 command_runner.go:130] >     "restrictOOMScoreAdj": false,
	I0916 11:07:14.945214 2175536 command_runner.go:130] >     "maxConcurrentDownloads": 3,
	I0916 11:07:14.945219 2175536 command_runner.go:130] >     "disableProcMount": false,
	I0916 11:07:14.945222 2175536 command_runner.go:130] >     "unsetSeccompProfile": "",
	I0916 11:07:14.945227 2175536 command_runner.go:130] >     "tolerateMissingHugetlbController": true,
	I0916 11:07:14.945232 2175536 command_runner.go:130] >     "disableHugetlbController": true,
	I0916 11:07:14.945237 2175536 command_runner.go:130] >     "device_ownership_from_security_context": false,
	I0916 11:07:14.945242 2175536 command_runner.go:130] >     "ignoreImageDefinedVolumes": false,
	I0916 11:07:14.945249 2175536 command_runner.go:130] >     "netnsMountsUnderStateDir": false,
	I0916 11:07:14.945253 2175536 command_runner.go:130] >     "enableUnprivilegedPorts": true,
	I0916 11:07:14.945258 2175536 command_runner.go:130] >     "enableUnprivilegedICMP": false,
	I0916 11:07:14.945264 2175536 command_runner.go:130] >     "enableCDI": false,
	I0916 11:07:14.945267 2175536 command_runner.go:130] >     "cdiSpecDirs": [
	I0916 11:07:14.945271 2175536 command_runner.go:130] >       "/etc/cdi",
	I0916 11:07:14.945274 2175536 command_runner.go:130] >       "/var/run/cdi"
	I0916 11:07:14.945278 2175536 command_runner.go:130] >     ],
	I0916 11:07:14.945282 2175536 command_runner.go:130] >     "imagePullProgressTimeout": "5m0s",
	I0916 11:07:14.945287 2175536 command_runner.go:130] >     "drainExecSyncIOTimeout": "0s",
	I0916 11:07:14.945294 2175536 command_runner.go:130] >     "imagePullWithSyncFs": false,
	I0916 11:07:14.945299 2175536 command_runner.go:130] >     "ignoreDeprecationWarnings": null,
	I0916 11:07:14.945304 2175536 command_runner.go:130] >     "containerdRootDir": "/var/lib/containerd",
	I0916 11:07:14.945309 2175536 command_runner.go:130] >     "containerdEndpoint": "/run/containerd/containerd.sock",
	I0916 11:07:14.945315 2175536 command_runner.go:130] >     "rootDir": "/var/lib/containerd/io.containerd.grpc.v1.cri",
	I0916 11:07:14.945324 2175536 command_runner.go:130] >     "stateDir": "/run/containerd/io.containerd.grpc.v1.cri"
	I0916 11:07:14.945329 2175536 command_runner.go:130] >   },
	I0916 11:07:14.945333 2175536 command_runner.go:130] >   "golang": "go1.22.7",
	I0916 11:07:14.945337 2175536 command_runner.go:130] >   "lastCNILoadStatus": "OK",
	I0916 11:07:14.945348 2175536 command_runner.go:130] >   "lastCNILoadStatus.default": "OK"
	I0916 11:07:14.945351 2175536 command_runner.go:130] > }
	I0916 11:07:14.948488 2175536 cni.go:84] Creating CNI manager for ""
	I0916 11:07:14.948515 2175536 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 11:07:14.948525 2175536 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:07:14.948549 2175536 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.58.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-890146 NodeName:multinode-890146 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.58.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.58.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPat
h:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:07:14.948685 2175536 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.58.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "multinode-890146"
	  kubeletExtraArgs:
	    node-ip: 192.168.58.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.58.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 11:07:14.948763 2175536 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:07:14.957220 2175536 command_runner.go:130] > kubeadm
	I0916 11:07:14.957295 2175536 command_runner.go:130] > kubectl
	I0916 11:07:14.957315 2175536 command_runner.go:130] > kubelet
	I0916 11:07:14.958383 2175536 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:07:14.958446 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 11:07:14.968059 2175536 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (320 bytes)
	I0916 11:07:14.986524 2175536 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:07:15.027083 2175536 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2170 bytes)
	I0916 11:07:15.049929 2175536 ssh_runner.go:195] Run: grep 192.168.58.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:07:15.054347 2175536 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.58.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:07:15.066765 2175536 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:07:15.161114 2175536 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:07:15.178638 2175536 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146 for IP: 192.168.58.2
	I0916 11:07:15.178659 2175536 certs.go:194] generating shared ca certs ...
	I0916 11:07:15.178685 2175536 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:15.178881 2175536 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 11:07:15.178956 2175536 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 11:07:15.178970 2175536 certs.go:256] generating profile certs ...
	I0916 11:07:15.179046 2175536 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key
	I0916 11:07:15.179064 2175536 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt with IP's: []
	I0916 11:07:15.598940 2175536 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt ...
	I0916 11:07:15.598971 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt: {Name:mk7ab98d016c599af820ddb3ea5f73c56de76d66 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:15.599197 2175536 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key ...
	I0916 11:07:15.599213 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key: {Name:mkca0d412ad6145c6ef5271650396cc573c31df2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:15.599309 2175536 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key.cd70a0e7
	I0916 11:07:15.599325 2175536 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt.cd70a0e7 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.58.2]
	I0916 11:07:16.019953 2175536 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt.cd70a0e7 ...
	I0916 11:07:16.019989 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt.cd70a0e7: {Name:mk95dd49b821f230f6c0530baed7f4bfdf8c60ac Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:16.020222 2175536 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key.cd70a0e7 ...
	I0916 11:07:16.020240 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key.cd70a0e7: {Name:mkbb51a1e4c459f22a404891e38caed47b3772c4 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:16.020343 2175536 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt.cd70a0e7 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt
	I0916 11:07:16.020436 2175536 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key.cd70a0e7 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key
	I0916 11:07:16.020508 2175536 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key
	I0916 11:07:16.020529 2175536 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt with IP's: []
	I0916 11:07:16.825523 2175536 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt ...
	I0916 11:07:16.825555 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt: {Name:mka686c367fd92977dbf5d5c98408fe58e5a5937 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:16.825732 2175536 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key ...
	I0916 11:07:16.825747 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key: {Name:mkce6bfd8cfba42678bcd3dc3e58f7a0a170a962 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:16.825852 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:07:16.825877 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:07:16.825893 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:07:16.825912 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:07:16.825923 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 11:07:16.825939 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 11:07:16.825950 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 11:07:16.825960 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 11:07:16.826017 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 11:07:16.826057 2175536 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 11:07:16.826068 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:07:16.826093 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 11:07:16.826121 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:07:16.826146 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 11:07:16.826190 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:07:16.826222 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:07:16.826243 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 11:07:16.826254 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 11:07:16.826850 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:07:16.868216 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 11:07:16.895839 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:07:16.921370 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 11:07:16.948177 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 11:07:16.973606 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 11:07:16.998400 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 11:07:17.025704 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 11:07:17.051525 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:07:17.078973 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 11:07:17.103671 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 11:07:17.128356 2175536 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 11:07:17.146818 2175536 ssh_runner.go:195] Run: openssl version
	I0916 11:07:17.152096 2175536 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:07:17.152534 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 11:07:17.162304 2175536 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 11:07:17.165902 2175536 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:07:17.165941 2175536 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:07:17.165992 2175536 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 11:07:17.172790 2175536 command_runner.go:130] > 51391683
	I0916 11:07:17.173264 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 11:07:17.182982 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 11:07:17.192748 2175536 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 11:07:17.196241 2175536 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:07:17.196278 2175536 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:07:17.196352 2175536 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 11:07:17.203077 2175536 command_runner.go:130] > 3ec20f2e
	I0916 11:07:17.203524 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:07:17.213254 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:07:17.223233 2175536 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:07:17.227103 2175536 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:07:17.227139 2175536 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:07:17.227200 2175536 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:07:17.233935 2175536 command_runner.go:130] > b5213941
	I0916 11:07:17.234440 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:07:17.244114 2175536 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:07:17.247546 2175536 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:07:17.247583 2175536 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:07:17.247622 2175536 kubeadm.go:392] StartCluster: {Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServe
rNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false C
ustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:07:17.247709 2175536 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 11:07:17.247773 2175536 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 11:07:17.284532 2175536 cri.go:89] found id: ""
	I0916 11:07:17.284626 2175536 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 11:07:17.293414 2175536 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/kubeadm-flags.env': No such file or directory
	I0916 11:07:17.293441 2175536 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/config.yaml': No such file or directory
	I0916 11:07:17.293449 2175536 command_runner.go:130] ! ls: cannot access '/var/lib/minikube/etcd': No such file or directory
	I0916 11:07:17.293513 2175536 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 11:07:17.302186 2175536 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 11:07:17.302252 2175536 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 11:07:17.310213 2175536 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	I0916 11:07:17.310239 2175536 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	I0916 11:07:17.310247 2175536 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	I0916 11:07:17.310256 2175536 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 11:07:17.311438 2175536 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 11:07:17.311459 2175536 kubeadm.go:157] found existing configuration files:
	
	I0916 11:07:17.311542 2175536 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 11:07:17.320590 2175536 command_runner.go:130] ! grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 11:07:17.320663 2175536 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 11:07:17.320751 2175536 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 11:07:17.329389 2175536 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 11:07:17.338451 2175536 command_runner.go:130] ! grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 11:07:17.338499 2175536 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 11:07:17.338564 2175536 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 11:07:17.347101 2175536 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 11:07:17.355757 2175536 command_runner.go:130] ! grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 11:07:17.355806 2175536 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 11:07:17.355886 2175536 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 11:07:17.364598 2175536 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 11:07:17.373592 2175536 command_runner.go:130] ! grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 11:07:17.373644 2175536 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 11:07:17.373702 2175536 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 11:07:17.382440 2175536 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 11:07:17.424574 2175536 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 11:07:17.424604 2175536 command_runner.go:130] > [init] Using Kubernetes version: v1.31.1
	I0916 11:07:17.424647 2175536 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 11:07:17.424657 2175536 command_runner.go:130] > [preflight] Running pre-flight checks
	I0916 11:07:17.444730 2175536 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:07:17.444785 2175536 command_runner.go:130] > [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:07:17.444841 2175536 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:07:17.444851 2175536 command_runner.go:130] > KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:07:17.444884 2175536 kubeadm.go:310] OS: Linux
	I0916 11:07:17.444892 2175536 command_runner.go:130] > OS: Linux
	I0916 11:07:17.444936 2175536 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 11:07:17.444944 2175536 command_runner.go:130] > CGROUPS_CPU: enabled
	I0916 11:07:17.444990 2175536 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 11:07:17.444997 2175536 command_runner.go:130] > CGROUPS_CPUACCT: enabled
	I0916 11:07:17.445043 2175536 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 11:07:17.445051 2175536 command_runner.go:130] > CGROUPS_CPUSET: enabled
	I0916 11:07:17.445097 2175536 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 11:07:17.445105 2175536 command_runner.go:130] > CGROUPS_DEVICES: enabled
	I0916 11:07:17.445152 2175536 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 11:07:17.445170 2175536 command_runner.go:130] > CGROUPS_FREEZER: enabled
	I0916 11:07:17.445220 2175536 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 11:07:17.445227 2175536 command_runner.go:130] > CGROUPS_MEMORY: enabled
	I0916 11:07:17.445271 2175536 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 11:07:17.445278 2175536 command_runner.go:130] > CGROUPS_PIDS: enabled
	I0916 11:07:17.445324 2175536 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 11:07:17.445331 2175536 command_runner.go:130] > CGROUPS_HUGETLB: enabled
	I0916 11:07:17.445376 2175536 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 11:07:17.445383 2175536 command_runner.go:130] > CGROUPS_BLKIO: enabled
	I0916 11:07:17.522659 2175536 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 11:07:17.522702 2175536 command_runner.go:130] > [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 11:07:17.522798 2175536 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 11:07:17.522808 2175536 command_runner.go:130] > [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 11:07:17.522897 2175536 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 11:07:17.522905 2175536 command_runner.go:130] > [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 11:07:17.531901 2175536 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 11:07:17.532135 2175536 command_runner.go:130] > [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 11:07:17.535785 2175536 out.go:235]   - Generating certificates and keys ...
	I0916 11:07:17.535897 2175536 command_runner.go:130] > [certs] Using existing ca certificate authority
	I0916 11:07:17.535912 2175536 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 11:07:17.535978 2175536 command_runner.go:130] > [certs] Using existing apiserver certificate and key on disk
	I0916 11:07:17.535987 2175536 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 11:07:17.784579 2175536 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 11:07:17.784609 2175536 command_runner.go:130] > [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 11:07:19.375182 2175536 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 11:07:19.375210 2175536 command_runner.go:130] > [certs] Generating "front-proxy-ca" certificate and key
	I0916 11:07:19.850362 2175536 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 11:07:19.850385 2175536 command_runner.go:130] > [certs] Generating "front-proxy-client" certificate and key
	I0916 11:07:20.229756 2175536 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 11:07:20.229785 2175536 command_runner.go:130] > [certs] Generating "etcd/ca" certificate and key
	I0916 11:07:20.759185 2175536 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 11:07:20.759217 2175536 command_runner.go:130] > [certs] Generating "etcd/server" certificate and key
	I0916 11:07:20.759502 2175536 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [localhost multinode-890146] and IPs [192.168.58.2 127.0.0.1 ::1]
	I0916 11:07:20.759518 2175536 command_runner.go:130] > [certs] etcd/server serving cert is signed for DNS names [localhost multinode-890146] and IPs [192.168.58.2 127.0.0.1 ::1]
	I0916 11:07:21.279131 2175536 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 11:07:21.279162 2175536 command_runner.go:130] > [certs] Generating "etcd/peer" certificate and key
	I0916 11:07:21.279291 2175536 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-890146] and IPs [192.168.58.2 127.0.0.1 ::1]
	I0916 11:07:21.279305 2175536 command_runner.go:130] > [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-890146] and IPs [192.168.58.2 127.0.0.1 ::1]
	I0916 11:07:21.571891 2175536 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 11:07:21.571918 2175536 command_runner.go:130] > [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 11:07:22.084709 2175536 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 11:07:22.084737 2175536 command_runner.go:130] > [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 11:07:22.479391 2175536 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 11:07:22.479417 2175536 command_runner.go:130] > [certs] Generating "sa" key and public key
	I0916 11:07:22.479682 2175536 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 11:07:22.479694 2175536 command_runner.go:130] > [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 11:07:23.087957 2175536 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 11:07:23.087993 2175536 command_runner.go:130] > [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 11:07:23.399549 2175536 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 11:07:23.399574 2175536 command_runner.go:130] > [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 11:07:23.772170 2175536 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 11:07:23.772197 2175536 command_runner.go:130] > [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 11:07:24.211547 2175536 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 11:07:24.211573 2175536 command_runner.go:130] > [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 11:07:24.459970 2175536 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 11:07:24.459995 2175536 command_runner.go:130] > [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 11:07:24.460618 2175536 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 11:07:24.460636 2175536 command_runner.go:130] > [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 11:07:24.464402 2175536 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 11:07:24.464578 2175536 command_runner.go:130] > [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 11:07:24.467079 2175536 out.go:235]   - Booting up control plane ...
	I0916 11:07:24.467193 2175536 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 11:07:24.467205 2175536 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 11:07:24.467583 2175536 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 11:07:24.467605 2175536 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 11:07:24.468768 2175536 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 11:07:24.468786 2175536 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 11:07:24.479816 2175536 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:07:24.479842 2175536 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:07:24.486177 2175536 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:07:24.486202 2175536 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:07:24.486455 2175536 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 11:07:24.486467 2175536 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0916 11:07:24.591102 2175536 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 11:07:24.591127 2175536 command_runner.go:130] > [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 11:07:24.591231 2175536 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:07:24.591235 2175536 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:07:25.586751 2175536 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00150665s
	I0916 11:07:25.586777 2175536 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.00150665s
	I0916 11:07:25.586896 2175536 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 11:07:25.586909 2175536 command_runner.go:130] > [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 11:07:32.088377 2175536 kubeadm.go:310] [api-check] The API server is healthy after 6.501798255s
	I0916 11:07:32.088402 2175536 command_runner.go:130] > [api-check] The API server is healthy after 6.501798255s
	I0916 11:07:32.112663 2175536 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:07:32.112686 2175536 command_runner.go:130] > [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:07:32.137967 2175536 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:07:32.137990 2175536 command_runner.go:130] > [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:07:32.181676 2175536 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:07:32.181708 2175536 command_runner.go:130] > [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:07:32.181900 2175536 kubeadm.go:310] [mark-control-plane] Marking the node multinode-890146 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:07:32.181906 2175536 command_runner.go:130] > [mark-control-plane] Marking the node multinode-890146 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:07:32.194621 2175536 kubeadm.go:310] [bootstrap-token] Using token: q96uai.z85znounub5fn5uo
	I0916 11:07:32.194796 2175536 command_runner.go:130] > [bootstrap-token] Using token: q96uai.z85znounub5fn5uo
	I0916 11:07:32.196352 2175536 out.go:235]   - Configuring RBAC rules ...
	I0916 11:07:32.196484 2175536 command_runner.go:130] > [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:07:32.196494 2175536 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:07:32.203638 2175536 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:07:32.203663 2175536 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:07:32.218667 2175536 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:07:32.218713 2175536 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:07:32.229888 2175536 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:07:32.229913 2175536 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:07:32.238376 2175536 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:07:32.238401 2175536 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:07:32.247247 2175536 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:07:32.247275 2175536 command_runner.go:130] > [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:07:32.496151 2175536 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:07:32.496178 2175536 command_runner.go:130] > [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:07:32.919854 2175536 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 11:07:32.919880 2175536 command_runner.go:130] > [addons] Applied essential addon: CoreDNS
	I0916 11:07:33.495386 2175536 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 11:07:33.495408 2175536 command_runner.go:130] > [addons] Applied essential addon: kube-proxy
	I0916 11:07:33.496574 2175536 kubeadm.go:310] 
	I0916 11:07:33.496666 2175536 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 11:07:33.496680 2175536 command_runner.go:130] > Your Kubernetes control-plane has initialized successfully!
	I0916 11:07:33.496687 2175536 kubeadm.go:310] 
	I0916 11:07:33.496771 2175536 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 11:07:33.496779 2175536 command_runner.go:130] > To start using your cluster, you need to run the following as a regular user:
	I0916 11:07:33.496784 2175536 kubeadm.go:310] 
	I0916 11:07:33.496810 2175536 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 11:07:33.496817 2175536 command_runner.go:130] >   mkdir -p $HOME/.kube
	I0916 11:07:33.496875 2175536 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:07:33.496892 2175536 command_runner.go:130] >   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:07:33.496965 2175536 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:07:33.496997 2175536 command_runner.go:130] >   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:07:33.497003 2175536 kubeadm.go:310] 
	I0916 11:07:33.497061 2175536 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 11:07:33.497066 2175536 command_runner.go:130] > Alternatively, if you are the root user, you can run:
	I0916 11:07:33.497070 2175536 kubeadm.go:310] 
	I0916 11:07:33.497117 2175536 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:07:33.497122 2175536 command_runner.go:130] >   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:07:33.497126 2175536 kubeadm.go:310] 
	I0916 11:07:33.497177 2175536 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 11:07:33.497181 2175536 command_runner.go:130] > You should now deploy a pod network to the cluster.
	I0916 11:07:33.497254 2175536 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:07:33.497258 2175536 command_runner.go:130] > Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:07:33.497325 2175536 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:07:33.497328 2175536 command_runner.go:130] >   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:07:33.497332 2175536 kubeadm.go:310] 
	I0916 11:07:33.497414 2175536 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:07:33.497418 2175536 command_runner.go:130] > You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:07:33.497494 2175536 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 11:07:33.497498 2175536 command_runner.go:130] > and service account keys on each node and then running the following as root:
	I0916 11:07:33.497502 2175536 kubeadm.go:310] 
	I0916 11:07:33.497584 2175536 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token q96uai.z85znounub5fn5uo \
	I0916 11:07:33.497587 2175536 command_runner.go:130] >   kubeadm join control-plane.minikube.internal:8443 --token q96uai.z85znounub5fn5uo \
	I0916 11:07:33.497688 2175536 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 11:07:33.497692 2175536 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 11:07:33.497712 2175536 kubeadm.go:310] 	--control-plane 
	I0916 11:07:33.497716 2175536 command_runner.go:130] > 	--control-plane 
	I0916 11:07:33.497720 2175536 kubeadm.go:310] 
	I0916 11:07:33.497804 2175536 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:07:33.497808 2175536 command_runner.go:130] > Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:07:33.497812 2175536 kubeadm.go:310] 
	I0916 11:07:33.497893 2175536 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token q96uai.z85znounub5fn5uo \
	I0916 11:07:33.497897 2175536 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token q96uai.z85znounub5fn5uo \
	I0916 11:07:33.497996 2175536 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 11:07:33.498000 2175536 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 11:07:33.502287 2175536 kubeadm.go:310] W0916 11:07:17.421188    1046 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:07:33.502315 2175536 command_runner.go:130] ! W0916 11:07:17.421188    1046 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:07:33.502646 2175536 kubeadm.go:310] W0916 11:07:17.422059    1046 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:07:33.502669 2175536 command_runner.go:130] ! W0916 11:07:17.422059    1046 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:07:33.502941 2175536 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:07:33.502955 2175536 command_runner.go:130] ! 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:07:33.503062 2175536 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:07:33.503072 2175536 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:07:33.503090 2175536 cni.go:84] Creating CNI manager for ""
	I0916 11:07:33.503101 2175536 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 11:07:33.505082 2175536 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 11:07:33.506860 2175536 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 11:07:33.510659 2175536 command_runner.go:130] >   File: /opt/cni/bin/portmap
	I0916 11:07:33.510724 2175536 command_runner.go:130] >   Size: 4030506   	Blocks: 7880       IO Block: 4096   regular file
	I0916 11:07:33.510732 2175536 command_runner.go:130] > Device: 3ch/60d	Inode: 1314974     Links: 1
	I0916 11:07:33.510738 2175536 command_runner.go:130] > Access: (0755/-rwxr-xr-x)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:07:33.510746 2175536 command_runner.go:130] > Access: 2023-12-04 16:39:54.000000000 +0000
	I0916 11:07:33.510751 2175536 command_runner.go:130] > Modify: 2023-12-04 16:39:54.000000000 +0000
	I0916 11:07:33.510755 2175536 command_runner.go:130] > Change: 2024-09-16 10:29:57.653995685 +0000
	I0916 11:07:33.510760 2175536 command_runner.go:130] >  Birth: 2024-09-16 10:29:57.597996108 +0000
	I0916 11:07:33.510910 2175536 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 11:07:33.510925 2175536 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 11:07:33.531123 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 11:07:33.781485 2175536 command_runner.go:130] > clusterrole.rbac.authorization.k8s.io/kindnet created
	I0916 11:07:33.789913 2175536 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/kindnet created
	I0916 11:07:33.800337 2175536 command_runner.go:130] > serviceaccount/kindnet created
	I0916 11:07:33.811879 2175536 command_runner.go:130] > daemonset.apps/kindnet created
	I0916 11:07:33.816494 2175536 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 11:07:33.816687 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:33.816774 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-890146 minikube.k8s.io/updated_at=2024_09_16T11_07_33_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=multinode-890146 minikube.k8s.io/primary=true
	I0916 11:07:34.046500 2175536 command_runner.go:130] > node/multinode-890146 labeled
	I0916 11:07:34.057074 2175536 command_runner.go:130] > -16
	I0916 11:07:34.068365 2175536 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/minikube-rbac created
	I0916 11:07:34.074023 2175536 ops.go:34] apiserver oom_adj: -16
	I0916 11:07:34.074134 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:34.178397 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:34.575029 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:34.666377 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:35.074264 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:35.163614 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:35.575071 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:35.661953 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:36.074521 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:36.171090 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:36.574610 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:36.709592 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:37.075111 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:37.169222 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:37.574790 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:37.682452 2175536 command_runner.go:130] > NAME      SECRETS   AGE
	I0916 11:07:37.682475 2175536 command_runner.go:130] > default   0         0s
	I0916 11:07:37.682514 2175536 kubeadm.go:1113] duration metric: took 3.865900406s to wait for elevateKubeSystemPrivileges
	I0916 11:07:37.682530 2175536 kubeadm.go:394] duration metric: took 20.434911561s to StartCluster
	I0916 11:07:37.682548 2175536 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:37.682618 2175536 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:07:37.683362 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:37.683598 2175536 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 11:07:37.683702 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 11:07:37.683928 2175536 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:07:37.683976 2175536 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:07:37.684044 2175536 addons.go:69] Setting storage-provisioner=true in profile "multinode-890146"
	I0916 11:07:37.684059 2175536 addons.go:234] Setting addon storage-provisioner=true in "multinode-890146"
	I0916 11:07:37.684085 2175536 host.go:66] Checking if "multinode-890146" exists ...
	I0916 11:07:37.684819 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:07:37.685206 2175536 addons.go:69] Setting default-storageclass=true in profile "multinode-890146"
	I0916 11:07:37.685232 2175536 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "multinode-890146"
	I0916 11:07:37.685494 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:07:37.687827 2175536 out.go:177] * Verifying Kubernetes components...
	I0916 11:07:37.689797 2175536 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:07:37.718484 2175536 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:07:37.720410 2175536 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:07:37.720429 2175536 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 11:07:37.720494 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:37.730359 2175536 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:07:37.730639 2175536 kapi.go:59] client config for multinode-890146: &rest.Config{Host:"https://192.168.58.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:07:37.731502 2175536 addons.go:234] Setting addon default-storageclass=true in "multinode-890146"
	I0916 11:07:37.731532 2175536 host.go:66] Checking if "multinode-890146" exists ...
	I0916 11:07:37.731955 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:07:37.732166 2175536 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 11:07:37.776744 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:37.782305 2175536 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 11:07:37.782325 2175536 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 11:07:37.782387 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:37.810615 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:37.985465 2175536 command_runner.go:130] > apiVersion: v1
	I0916 11:07:37.985498 2175536 command_runner.go:130] > data:
	I0916 11:07:37.985503 2175536 command_runner.go:130] >   Corefile: |
	I0916 11:07:37.985507 2175536 command_runner.go:130] >     .:53 {
	I0916 11:07:37.985510 2175536 command_runner.go:130] >         errors
	I0916 11:07:37.985515 2175536 command_runner.go:130] >         health {
	I0916 11:07:37.985545 2175536 command_runner.go:130] >            lameduck 5s
	I0916 11:07:37.985556 2175536 command_runner.go:130] >         }
	I0916 11:07:37.985579 2175536 command_runner.go:130] >         ready
	I0916 11:07:37.985592 2175536 command_runner.go:130] >         kubernetes cluster.local in-addr.arpa ip6.arpa {
	I0916 11:07:37.985596 2175536 command_runner.go:130] >            pods insecure
	I0916 11:07:37.985602 2175536 command_runner.go:130] >            fallthrough in-addr.arpa ip6.arpa
	I0916 11:07:37.985621 2175536 command_runner.go:130] >            ttl 30
	I0916 11:07:37.985637 2175536 command_runner.go:130] >         }
	I0916 11:07:37.985655 2175536 command_runner.go:130] >         prometheus :9153
	I0916 11:07:37.985662 2175536 command_runner.go:130] >         forward . /etc/resolv.conf {
	I0916 11:07:37.985677 2175536 command_runner.go:130] >            max_concurrent 1000
	I0916 11:07:37.985680 2175536 command_runner.go:130] >         }
	I0916 11:07:37.985685 2175536 command_runner.go:130] >         cache 30
	I0916 11:07:37.985689 2175536 command_runner.go:130] >         loop
	I0916 11:07:37.985693 2175536 command_runner.go:130] >         reload
	I0916 11:07:37.985696 2175536 command_runner.go:130] >         loadbalance
	I0916 11:07:37.985704 2175536 command_runner.go:130] >     }
	I0916 11:07:37.985708 2175536 command_runner.go:130] > kind: ConfigMap
	I0916 11:07:37.985711 2175536 command_runner.go:130] > metadata:
	I0916 11:07:37.985732 2175536 command_runner.go:130] >   creationTimestamp: "2024-09-16T11:07:32Z"
	I0916 11:07:37.985742 2175536 command_runner.go:130] >   name: coredns
	I0916 11:07:37.985746 2175536 command_runner.go:130] >   namespace: kube-system
	I0916 11:07:37.985750 2175536 command_runner.go:130] >   resourceVersion: "270"
	I0916 11:07:37.985765 2175536 command_runner.go:130] >   uid: 0b2eb92e-2dcd-4757-9f54-c5a717906b13
	I0916 11:07:37.990194 2175536 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:07:37.990418 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.58.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 11:07:38.031706 2175536 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 11:07:38.091657 2175536 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:07:38.576844 2175536 command_runner.go:130] > configmap/coredns replaced
	I0916 11:07:38.583409 2175536 start.go:971] {"host.minikube.internal": 192.168.58.1} host record injected into CoreDNS's ConfigMap
	I0916 11:07:38.584195 2175536 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:07:38.584499 2175536 kapi.go:59] client config for multinode-890146: &rest.Config{Host:"https://192.168.58.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:07:38.584953 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0916 11:07:38.583598 2175536 command_runner.go:130] > storageclass.storage.k8s.io/standard created
	I0916 11:07:38.583888 2175536 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:07:38.585114 2175536 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 11:07:38.585141 2175536 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 11:07:38.585199 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/apis/storage.k8s.io/v1/storageclasses
	I0916 11:07:38.585209 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.585217 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.585227 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.585340 2175536 kapi.go:59] client config for multinode-890146: &rest.Config{Host:"https://192.168.58.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:07:38.586158 2175536 node_ready.go:35] waiting up to 6m0s for node "multinode-890146" to be "Ready" ...
	I0916 11:07:38.585027 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.586324 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.586361 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.586307 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:38.586971 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.587013 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.587033 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.614333 2175536 round_trippers.go:574] Response Status: 200 OK in 27 milliseconds
	I0916 11:07:38.614362 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.614370 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.614375 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.614379 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.614383 2175536 round_trippers.go:580]     Content-Length: 291
	I0916 11:07:38.614385 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.614389 2175536 round_trippers.go:580]     Audit-Id: 4a094915-a6d5-4128-9d47-235633d59b72
	I0916 11:07:38.614391 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.614415 2175536 request.go:1351] Response Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"517b28ab-923f-4c66-9f57-81af25d1c992","resourceVersion":"385","creationTimestamp":"2024-09-16T11:07:32Z"},"spec":{"replicas":2},"status":{"replicas":2,"selector":"k8s-app=kube-dns"}}
	I0916 11:07:38.614886 2175536 request.go:1351] Request Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"517b28ab-923f-4c66-9f57-81af25d1c992","resourceVersion":"385","creationTimestamp":"2024-09-16T11:07:32Z"},"spec":{"replicas":1},"status":{"replicas":2,"selector":"k8s-app=kube-dns"}}
	I0916 11:07:38.614950 2175536 round_trippers.go:463] PUT https://192.168.58.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0916 11:07:38.614963 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.614972 2175536 round_trippers.go:473]     Content-Type: application/json
	I0916 11:07:38.614977 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.614981 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.615192 2175536 round_trippers.go:574] Response Status: 200 OK in 28 milliseconds
	I0916 11:07:38.615210 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.615217 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.615223 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.615227 2175536 round_trippers.go:580]     Audit-Id: 859e4c5d-5c9d-471b-8619-3ea6d9bf3407
	I0916 11:07:38.615237 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.615244 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.615247 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.615324 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:38.615959 2175536 node_ready.go:49] node "multinode-890146" has status "Ready":"True"
	I0916 11:07:38.615986 2175536 node_ready.go:38] duration metric: took 29.750107ms for node "multinode-890146" to be "Ready" ...
	I0916 11:07:38.616008 2175536 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:07:38.616114 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:07:38.616128 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.616137 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.616149 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.620217 2175536 round_trippers.go:574] Response Status: 200 OK in 34 milliseconds
	I0916 11:07:38.620246 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.620254 2175536 round_trippers.go:580]     Content-Length: 1273
	I0916 11:07:38.620285 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.620313 2175536 round_trippers.go:580]     Audit-Id: 283c52c3-8695-4b9d-9523-92eb041f0f49
	I0916 11:07:38.620317 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.620320 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.620323 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.620332 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.620384 2175536 request.go:1351] Response Body: {"kind":"StorageClassList","apiVersion":"storage.k8s.io/v1","metadata":{"resourceVersion":"393"},"items":[{"metadata":{"name":"standard","uid":"f44a335b-ae9b-40d9-90b1-db69666be9fa","resourceVersion":"389","creationTimestamp":"2024-09-16T11:07:38Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T11:07:38Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kuberne
tes.io/last-applied-configuration":{},"f:storageclass.kubernetes.io/is- [truncated 249 chars]
	I0916 11:07:38.620828 2175536 request.go:1351] Request Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"f44a335b-ae9b-40d9-90b1-db69666be9fa","resourceVersion":"389","creationTimestamp":"2024-09-16T11:07:38Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T11:07:38Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storageclas
s.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 11:07:38.620911 2175536 round_trippers.go:463] PUT https://192.168.58.2:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0916 11:07:38.620925 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.620933 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.620943 2175536 round_trippers.go:473]     Content-Type: application/json
	I0916 11:07:38.620946 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.630264 2175536 round_trippers.go:574] Response Status: 200 OK in 15 milliseconds
	I0916 11:07:38.630302 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.630311 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.630334 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.630345 2175536 round_trippers.go:580]     Content-Length: 291
	I0916 11:07:38.630349 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.630352 2175536 round_trippers.go:580]     Audit-Id: ca088995-9d42-423f-b4b0-acecc0697bbc
	I0916 11:07:38.630355 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.630358 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.630394 2175536 request.go:1351] Response Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"517b28ab-923f-4c66-9f57-81af25d1c992","resourceVersion":"394","creationTimestamp":"2024-09-16T11:07:32Z"},"spec":{"replicas":1},"status":{"replicas":2,"selector":"k8s-app=kube-dns"}}
	I0916 11:07:38.639251 2175536 round_trippers.go:574] Response Status: 200 OK in 23 milliseconds
	I0916 11:07:38.639287 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.639296 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.639300 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.639303 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.639306 2175536 round_trippers.go:580]     Audit-Id: 1fbdaac5-fe08-4e41-822c-b05050b8ba14
	I0916 11:07:38.639308 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.639311 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.639860 2175536 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"395"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-bb4db","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"3fd53b00-28ef-44ef-8541-097ebc870b2f","resourceVersion":"383","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 61241 chars]
	I0916 11:07:38.642421 2175536 round_trippers.go:574] Response Status: 200 OK in 21 milliseconds
	I0916 11:07:38.642447 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.642455 2175536 round_trippers.go:580]     Audit-Id: 5efc481d-8713-402a-a63f-978981e5a3c5
	I0916 11:07:38.642458 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.642461 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.642464 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.642468 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.642577 2175536 round_trippers.go:580]     Content-Length: 1220
	I0916 11:07:38.642583 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.642628 2175536 request.go:1351] Response Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"f44a335b-ae9b-40d9-90b1-db69666be9fa","resourceVersion":"389","creationTimestamp":"2024-09-16T11:07:38Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T11:07:38Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storagecla
ss.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 11:07:38.645446 2175536 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-bb4db" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:38.645584 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-bb4db
	I0916 11:07:38.645597 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.645606 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.645609 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.648071 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:38.648127 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.648141 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.648148 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.648155 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.648158 2175536 round_trippers.go:580]     Audit-Id: 00050f3a-dd7b-4cf7-9ea7-3e35763656d2
	I0916 11:07:38.648161 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.648170 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.648380 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-bb4db","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"3fd53b00-28ef-44ef-8541-097ebc870b2f","resourceVersion":"383","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:38.648924 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:38.648945 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.648953 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.648958 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.653613 2175536 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:07:38.653638 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.653646 2175536 round_trippers.go:580]     Audit-Id: c3710fc8-2f99-4c0a-b022-d30c8ed1273c
	I0916 11:07:38.653652 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.653655 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.653659 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.653662 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.653665 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.654153 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:38.871395 2175536 command_runner.go:130] > serviceaccount/storage-provisioner created
	I0916 11:07:38.889747 2175536 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner created
	I0916 11:07:38.910198 2175536 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0916 11:07:38.930443 2175536 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0916 11:07:38.943209 2175536 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath created
	I0916 11:07:38.955414 2175536 command_runner.go:130] > pod/storage-provisioner created
	I0916 11:07:38.970871 2175536 out.go:177] * Enabled addons: default-storageclass, storage-provisioner
	I0916 11:07:38.972884 2175536 addons.go:510] duration metric: took 1.288901926s for enable addons: enabled=[default-storageclass storage-provisioner]
	I0916 11:07:39.085387 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0916 11:07:39.085409 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:39.085419 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:39.085423 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:39.088200 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:39.088222 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:39.088230 2175536 round_trippers.go:580]     Content-Length: 291
	I0916 11:07:39.088236 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:39 GMT
	I0916 11:07:39.088241 2175536 round_trippers.go:580]     Audit-Id: 19ffed9d-16d2-409c-bfb3-41237962b67a
	I0916 11:07:39.088245 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:39.088248 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:39.088251 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:39.088254 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:39.088501 2175536 request.go:1351] Response Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"517b28ab-923f-4c66-9f57-81af25d1c992","resourceVersion":"405","creationTimestamp":"2024-09-16T11:07:32Z"},"spec":{"replicas":1},"status":{"replicas":1,"selector":"k8s-app=kube-dns"}}
	I0916 11:07:39.088603 2175536 kapi.go:214] "coredns" deployment in "kube-system" namespace and "multinode-890146" context rescaled to 1 replicas
	I0916 11:07:39.145943 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-bb4db
	I0916 11:07:39.146018 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:39.146032 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:39.146058 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:39.148854 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:39.148874 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:39.148883 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:39 GMT
	I0916 11:07:39.148888 2175536 round_trippers.go:580]     Audit-Id: b365c193-f3ab-4b28-81f4-06a5e9e7e5b6
	I0916 11:07:39.148892 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:39.148895 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:39.148898 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:39.148901 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:39.149560 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-bb4db","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"3fd53b00-28ef-44ef-8541-097ebc870b2f","resourceVersion":"397","creationTimestamp":"2024-09-16T11:07:37Z","deletionTimestamp":"2024-09-16T11:08:08Z","deletionGracePeriodSeconds":30,"labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:pod
AntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecutio [truncated 6438 chars]
	I0916 11:07:39.150216 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:39.150266 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:39.150291 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:39.150312 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:39.152832 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:39.152887 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:39.152919 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:39.152938 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:39.152952 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:39.152970 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:39.152997 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:39 GMT
	I0916 11:07:39.153014 2175536 round_trippers.go:580]     Audit-Id: 30488fb8-bd7b-41c2-9a34-cb4b4b9d465c
	I0916 11:07:39.153655 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:39.645732 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-bb4db
	I0916 11:07:39.645759 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:39.645768 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:39.645772 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:39.648445 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:39.648473 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:39.648482 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:39 GMT
	I0916 11:07:39.648489 2175536 round_trippers.go:580]     Audit-Id: 5be2ee27-86ee-4352-8a93-05c002488c36
	I0916 11:07:39.648492 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:39.648496 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:39.648498 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:39.648501 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:39.648663 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-bb4db","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"3fd53b00-28ef-44ef-8541-097ebc870b2f","resourceVersion":"397","creationTimestamp":"2024-09-16T11:07:37Z","deletionTimestamp":"2024-09-16T11:08:08Z","deletionGracePeriodSeconds":30,"labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:pod
AntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecutio [truncated 6438 chars]
	I0916 11:07:39.649244 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:39.649260 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:39.649269 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:39.649274 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:39.651575 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:39.651600 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:39.651608 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:39.651614 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:39.651619 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:39.651623 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:39 GMT
	I0916 11:07:39.651628 2175536 round_trippers.go:580]     Audit-Id: 9a3c6506-52db-42a2-a4af-1c1bbfaa92fe
	I0916 11:07:39.651632 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:39.652119 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:40.146336 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-bb4db
	I0916 11:07:40.146362 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:40.146374 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:40.146381 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:40.148782 2175536 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 11:07:40.148823 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:40.148832 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:40.148838 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:40.148842 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:40.148846 2175536 round_trippers.go:580]     Content-Length: 216
	I0916 11:07:40.148850 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:40 GMT
	I0916 11:07:40.148854 2175536 round_trippers.go:580]     Audit-Id: 5ab32d15-4dae-45a2-a551-9bf3ba96b47b
	I0916 11:07:40.148858 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:40.148887 2175536 request.go:1351] Response Body: {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"pods \"coredns-7c65d6cfc9-bb4db\" not found","reason":"NotFound","details":{"name":"coredns-7c65d6cfc9-bb4db","kind":"pods"},"code":404}
	I0916 11:07:40.149157 2175536 pod_ready.go:98] error getting pod "coredns-7c65d6cfc9-bb4db" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bb4db" not found
	I0916 11:07:40.149180 2175536 pod_ready.go:82] duration metric: took 1.503700038s for pod "coredns-7c65d6cfc9-bb4db" in "kube-system" namespace to be "Ready" ...
	E0916 11:07:40.149192 2175536 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "coredns-7c65d6cfc9-bb4db" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bb4db" not found
	I0916 11:07:40.149204 2175536 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:40.149276 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:40.149287 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:40.149295 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:40.149300 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:40.151765 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:40.151787 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:40.151796 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:40.151800 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:40.151804 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:40.151807 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:40.151810 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:40 GMT
	I0916 11:07:40.151813 2175536 round_trippers.go:580]     Audit-Id: 34ca6fc7-9d78-41c7-bac9-3f621a27542a
	I0916 11:07:40.152023 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:40.152601 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:40.152618 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:40.152627 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:40.152631 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:40.155026 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:40.155051 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:40.155060 2175536 round_trippers.go:580]     Audit-Id: 764d0383-512c-419b-8e3e-230374d932db
	I0916 11:07:40.155064 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:40.155067 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:40.155070 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:40.155073 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:40.155077 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:40 GMT
	I0916 11:07:40.155516 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:40.650209 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:40.650233 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:40.650243 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:40.650249 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:40.652601 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:40.652628 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:40.652636 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:40.652641 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:40.652644 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:40.652649 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:40 GMT
	I0916 11:07:40.652652 2175536 round_trippers.go:580]     Audit-Id: 1bd8ff28-3d0d-4fe7-8c36-446b92a7542e
	I0916 11:07:40.652654 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:40.652907 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:40.653460 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:40.653479 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:40.653488 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:40.653493 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:40.655599 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:40.655662 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:40.655686 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:40.655704 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:40.655736 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:40 GMT
	I0916 11:07:40.655758 2175536 round_trippers.go:580]     Audit-Id: 56b35a80-4ecd-4abd-835c-abd6596a50c1
	I0916 11:07:40.655775 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:40.655786 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:40.655938 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:41.149907 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:41.149937 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:41.149947 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:41.149952 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:41.152358 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:41.152392 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:41.152401 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:41.152405 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:41 GMT
	I0916 11:07:41.152409 2175536 round_trippers.go:580]     Audit-Id: 8b7a240b-1a4d-4031-a2a9-209c23947913
	I0916 11:07:41.152415 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:41.152419 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:41.152423 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:41.152679 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:41.153246 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:41.153262 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:41.153270 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:41.153275 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:41.155222 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:07:41.155272 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:41.155303 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:41 GMT
	I0916 11:07:41.155321 2175536 round_trippers.go:580]     Audit-Id: a57fca7e-ed29-4a1a-ba8f-d43730b173aa
	I0916 11:07:41.155353 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:41.155374 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:41.155385 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:41.155389 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:41.155552 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:41.650177 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:41.650208 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:41.650225 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:41.650229 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:41.652864 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:41.652961 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:41.652970 2175536 round_trippers.go:580]     Audit-Id: 9832851e-07a8-4c34-a483-bfa0e73c4f45
	I0916 11:07:41.652975 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:41.652978 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:41.652982 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:41.652987 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:41.652990 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:41 GMT
	I0916 11:07:41.653103 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:41.653667 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:41.653687 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:41.653696 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:41.653701 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:41.656006 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:41.656072 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:41.656097 2175536 round_trippers.go:580]     Audit-Id: f3d0e4f0-4c35-47c6-8331-feeb1ff72110
	I0916 11:07:41.656101 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:41.656104 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:41.656107 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:41.656110 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:41.656113 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:41 GMT
	I0916 11:07:41.656255 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:42.150375 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:42.150404 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:42.150415 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:42.150422 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:42.153554 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:07:42.153583 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:42.153593 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:42.153597 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:42.153600 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:42.153603 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:42.153606 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:42 GMT
	I0916 11:07:42.153609 2175536 round_trippers.go:580]     Audit-Id: 29651ac0-78f3-4159-bf3f-57eb5afa0075
	I0916 11:07:42.153951 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:42.154537 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:42.154555 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:42.154562 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:42.154567 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:42.157323 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:42.157350 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:42.157360 2175536 round_trippers.go:580]     Audit-Id: 7f87e003-ee2a-4503-bca5-eb000b894d54
	I0916 11:07:42.157365 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:42.157395 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:42.157400 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:42.157403 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:42.157408 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:42 GMT
	I0916 11:07:42.157911 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:42.158317 2175536 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:07:42.650097 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:42.650125 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:42.650135 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:42.650141 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:42.652718 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:42.652801 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:42.652823 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:42 GMT
	I0916 11:07:42.652842 2175536 round_trippers.go:580]     Audit-Id: e9319798-df4d-4305-bca0-735e18e44921
	I0916 11:07:42.652874 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:42.652898 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:42.652921 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:42.652933 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:42.653091 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:42.653726 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:42.653752 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:42.653762 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:42.653767 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:42.655841 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:42.655864 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:42.655873 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:42.655879 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:42 GMT
	I0916 11:07:42.655882 2175536 round_trippers.go:580]     Audit-Id: 9e2a34c4-fff5-4866-82e9-0231140dd2c6
	I0916 11:07:42.655885 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:42.655888 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:42.655899 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:42.656196 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:43.150410 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:43.150437 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:43.150447 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:43.150451 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:43.152996 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:43.153020 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:43.153030 2175536 round_trippers.go:580]     Audit-Id: 975eeb6c-ce5d-455d-a514-211c32f409b5
	I0916 11:07:43.153034 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:43.153037 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:43.153042 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:43.153045 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:43.153049 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:43 GMT
	I0916 11:07:43.153348 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:43.153928 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:43.153948 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:43.153957 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:43.153961 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:43.156294 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:43.156358 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:43.156381 2175536 round_trippers.go:580]     Audit-Id: 4841fd85-d136-42f7-8274-d51d7a4f33d6
	I0916 11:07:43.156400 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:43.156431 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:43.156452 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:43.156467 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:43.156484 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:43 GMT
	I0916 11:07:43.157073 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:43.650277 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:43.650300 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:43.650310 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:43.650315 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:43.652921 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:43.652946 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:43.652955 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:43.652971 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:43.652975 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:43 GMT
	I0916 11:07:43.652978 2175536 round_trippers.go:580]     Audit-Id: 3add61c7-2466-4b49-a2e5-ae5897022c2a
	I0916 11:07:43.652981 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:43.652984 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:43.653111 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:43.653715 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:43.653733 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:43.653752 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:43.653765 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:43.655796 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:43.655818 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:43.655826 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:43.655834 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:43.655848 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:43.655852 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:43 GMT
	I0916 11:07:43.655855 2175536 round_trippers.go:580]     Audit-Id: 6c2fd7c3-86e9-4032-be5f-d734803119e6
	I0916 11:07:43.655862 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:43.656279 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:44.149942 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:44.149969 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:44.149978 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:44.149983 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:44.152474 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:44.152533 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:44.152542 2175536 round_trippers.go:580]     Audit-Id: d98ef9be-7d86-41f6-a699-a7c21e36aa20
	I0916 11:07:44.152546 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:44.152549 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:44.152551 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:44.152556 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:44.152566 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:44 GMT
	I0916 11:07:44.152678 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:44.153211 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:44.153230 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:44.153239 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:44.153244 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:44.155272 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:44.155294 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:44.155309 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:44.155314 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:44.155317 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:44.155321 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:44 GMT
	I0916 11:07:44.155324 2175536 round_trippers.go:580]     Audit-Id: 6a6ff00f-94a0-4c8c-ba4e-9b6b0b861050
	I0916 11:07:44.155327 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:44.155504 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:44.649519 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:44.649543 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:44.649553 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:44.649560 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:44.651966 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:44.651990 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:44.651998 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:44.652002 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:44.652005 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:44.652010 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:44 GMT
	I0916 11:07:44.652014 2175536 round_trippers.go:580]     Audit-Id: 1cacca30-b09c-48fd-8b96-d756b6bbd471
	I0916 11:07:44.652021 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:44.652276 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:44.652836 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:44.652858 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:44.652867 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:44.652871 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:44.654970 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:44.654993 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:44.655002 2175536 round_trippers.go:580]     Audit-Id: 2fac3b73-4d07-46fa-839a-5b9e68aba17a
	I0916 11:07:44.655006 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:44.655010 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:44.655013 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:44.655017 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:44.655020 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:44 GMT
	I0916 11:07:44.655206 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:44.655582 2175536 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:07:45.150392 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:45.150428 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:45.150438 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:45.150444 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:45.153592 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:07:45.153622 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:45.153631 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:45 GMT
	I0916 11:07:45.153637 2175536 round_trippers.go:580]     Audit-Id: e0e7cbcb-711f-42a8-a345-553c54251c5c
	I0916 11:07:45.153642 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:45.153646 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:45.153649 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:45.153653 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:45.154385 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:45.155151 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:45.155186 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:45.155200 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:45.155206 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:45.157914 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:45.157937 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:45.157947 2175536 round_trippers.go:580]     Audit-Id: d8ad18d6-1d55-45ed-847e-e7017fe91285
	I0916 11:07:45.157959 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:45.157963 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:45.157967 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:45.157970 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:45.157974 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:45 GMT
	I0916 11:07:45.158586 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:45.649493 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:45.649516 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:45.649525 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:45.649531 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:45.652257 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:45.652352 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:45.652371 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:45 GMT
	I0916 11:07:45.652376 2175536 round_trippers.go:580]     Audit-Id: 28718db1-5ea1-434f-951f-e92c45a31d7f
	I0916 11:07:45.652380 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:45.652382 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:45.652385 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:45.652388 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:45.652594 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:45.653322 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:45.653341 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:45.653379 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:45.653388 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:45.656546 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:07:45.656576 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:45.656584 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:45.656589 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:45 GMT
	I0916 11:07:45.656592 2175536 round_trippers.go:580]     Audit-Id: 918bc634-55c7-47d8-bbed-18f85c66fbae
	I0916 11:07:45.656595 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:45.656604 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:45.656607 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:45.656716 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:46.149493 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:46.149517 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:46.149528 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:46.149532 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:46.151983 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:46.152010 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:46.152019 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:46.152023 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:46.152029 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:46 GMT
	I0916 11:07:46.152033 2175536 round_trippers.go:580]     Audit-Id: 01b124e8-64cf-49ca-a9f7-d841def34329
	I0916 11:07:46.152040 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:46.152043 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:46.152196 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:46.152764 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:46.152783 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:46.152792 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:46.152797 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:46.155185 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:46.155208 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:46.155218 2175536 round_trippers.go:580]     Audit-Id: d8cfd4cd-9192-4725-a28a-c60bd805bbc5
	I0916 11:07:46.155224 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:46.155228 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:46.155231 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:46.155234 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:46.155236 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:46 GMT
	I0916 11:07:46.155348 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:46.650197 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:46.650225 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:46.650236 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:46.650241 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:46.652588 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:46.652629 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:46.652638 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:46.652643 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:46 GMT
	I0916 11:07:46.652646 2175536 round_trippers.go:580]     Audit-Id: 4537c216-61e2-4212-9da2-a20f475f3ca7
	I0916 11:07:46.652648 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:46.652651 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:46.652654 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:46.652835 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:46.653401 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:46.653419 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:46.653428 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:46.653434 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:46.655538 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:46.655559 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:46.655567 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:46.655571 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:46.655574 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:46.655578 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:46.655581 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:46 GMT
	I0916 11:07:46.655585 2175536 round_trippers.go:580]     Audit-Id: 4353c81e-7c05-4b0c-8f39-6b093875fc66
	I0916 11:07:46.655682 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:46.656046 2175536 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:07:47.149456 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:47.149480 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:47.149490 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:47.149494 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:47.151933 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:47.151955 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:47.151964 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:47.151967 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:47.151970 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:47.151973 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:47.151975 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:47 GMT
	I0916 11:07:47.151978 2175536 round_trippers.go:580]     Audit-Id: 556f65df-bc59-4c2c-aaba-aea2d0117b1b
	I0916 11:07:47.152101 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:47.152632 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:47.152655 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:47.152664 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:47.152668 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:47.154721 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:47.154741 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:47.154748 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:47.154753 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:47.154756 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:47.154760 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:47.154764 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:47 GMT
	I0916 11:07:47.154767 2175536 round_trippers.go:580]     Audit-Id: ce240a75-c622-4304-b4c1-45745fe358af
	I0916 11:07:47.154849 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:47.649417 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:47.649444 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:47.649454 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:47.649459 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:47.652176 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:47.652200 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:47.652208 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:47.652212 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:47 GMT
	I0916 11:07:47.652215 2175536 round_trippers.go:580]     Audit-Id: fdb90aa5-51ed-4f8b-a18f-24770c9031d3
	I0916 11:07:47.652218 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:47.652221 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:47.652224 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:47.652540 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:47.653089 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:47.653107 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:47.653117 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:47.653121 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:47.655203 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:47.655221 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:47.655229 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:47.655238 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:47.655241 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:47.655244 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:47.655248 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:47 GMT
	I0916 11:07:47.655250 2175536 round_trippers.go:580]     Audit-Id: 318fafc3-8aec-4595-a5f6-488f717c1b24
	I0916 11:07:47.655556 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:48.150441 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:48.150482 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:48.150496 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:48.150504 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:48.153149 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:48.153174 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:48.153183 2175536 round_trippers.go:580]     Audit-Id: ffb82b1b-7700-4a59-bb9a-e4b74786db89
	I0916 11:07:48.153189 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:48.153193 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:48.153198 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:48.153201 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:48.153204 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:48 GMT
	I0916 11:07:48.153650 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:48.154208 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:48.154225 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:48.154234 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:48.154238 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:48.156610 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:48.156634 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:48.156644 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:48.156648 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:48.156651 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:48.156654 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:48.156657 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:48 GMT
	I0916 11:07:48.156662 2175536 round_trippers.go:580]     Audit-Id: 385ece48-691f-4228-b168-b1a96d093553
	I0916 11:07:48.156973 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:48.650156 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:48.650183 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:48.650193 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:48.650199 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:48.652559 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:48.652582 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:48.652590 2175536 round_trippers.go:580]     Audit-Id: ac00f509-246d-47e2-b97b-2af32ddecc5b
	I0916 11:07:48.652594 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:48.652598 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:48.652603 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:48.652607 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:48.652611 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:48 GMT
	I0916 11:07:48.652738 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:48.653267 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:48.653276 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:48.653286 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:48.653290 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:48.655187 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:07:48.655205 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:48.655213 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:48.655217 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:48 GMT
	I0916 11:07:48.655220 2175536 round_trippers.go:580]     Audit-Id: ea21718a-6d06-4b0d-82f4-fd50b1b7300b
	I0916 11:07:48.655223 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:48.655226 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:48.655228 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:48.655326 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:49.149390 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:49.149417 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:49.149427 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:49.149432 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:49.151841 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:49.151909 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:49.151946 2175536 round_trippers.go:580]     Audit-Id: 12353c7b-2061-4868-b2d9-9e0c9ffec00e
	I0916 11:07:49.151970 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:49.151989 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:49.151993 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:49.151996 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:49.151999 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:49 GMT
	I0916 11:07:49.152137 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:49.152686 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:49.152704 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:49.152713 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:49.152720 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:49.154872 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:49.154898 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:49.154907 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:49.154911 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:49.154915 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:49 GMT
	I0916 11:07:49.154919 2175536 round_trippers.go:580]     Audit-Id: b3433736-11e5-40ea-bc6a-6cd2d25b76d3
	I0916 11:07:49.154922 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:49.154925 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:49.155227 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:49.155597 2175536 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:07:49.649840 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:49.649865 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:49.649875 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:49.649879 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:49.652392 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:49.652417 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:49.652427 2175536 round_trippers.go:580]     Audit-Id: 426581de-530c-412d-aac0-21da6a2d9cb3
	I0916 11:07:49.652431 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:49.652434 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:49.652437 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:49.652440 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:49.652442 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:49 GMT
	I0916 11:07:49.652748 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:49.653293 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:49.653314 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:49.653323 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:49.653328 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:49.655446 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:49.655464 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:49.655474 2175536 round_trippers.go:580]     Audit-Id: 22279a98-0ff3-4d5c-88ff-04fb85625866
	I0916 11:07:49.655478 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:49.655483 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:49.655486 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:49.655489 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:49.655492 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:49 GMT
	I0916 11:07:49.655616 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:50.150410 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:50.150437 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:50.150449 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:50.150454 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:50.152861 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:50.152893 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:50.152902 2175536 round_trippers.go:580]     Audit-Id: 2f55421a-e0c5-4c55-8be9-5c3f4cadae6f
	I0916 11:07:50.152908 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:50.152912 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:50.152917 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:50.152921 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:50.152924 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:50 GMT
	I0916 11:07:50.153202 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:50.153782 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:50.153802 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:50.153812 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:50.153821 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:50.156141 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:50.156205 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:50.156221 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:50.156224 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:50.156229 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:50.156232 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:50.156235 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:50 GMT
	I0916 11:07:50.156238 2175536 round_trippers.go:580]     Audit-Id: 6ad7be8a-4e1d-4296-9ac3-b3a10e601ecd
	I0916 11:07:50.156650 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:50.650195 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:50.650227 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:50.650237 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:50.650241 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:50.652612 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:50.652638 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:50.652647 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:50.652651 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:50.652655 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:50.652660 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:50 GMT
	I0916 11:07:50.652663 2175536 round_trippers.go:580]     Audit-Id: d1588a98-5762-41d2-8fbf-f805d2251443
	I0916 11:07:50.652667 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:50.652980 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:50.653556 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:50.653575 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:50.653583 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:50.653588 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:50.655606 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:50.655661 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:50.655683 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:50.655696 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:50.655700 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:50 GMT
	I0916 11:07:50.655703 2175536 round_trippers.go:580]     Audit-Id: 5d184465-404c-4d70-875d-7374b0d015bd
	I0916 11:07:50.655706 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:50.655709 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:50.655848 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:51.150021 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:51.150048 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:51.150058 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:51.150062 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:51.152456 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:51.152482 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:51.152490 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:51 GMT
	I0916 11:07:51.152495 2175536 round_trippers.go:580]     Audit-Id: 97ab8e37-04b4-486e-9d0d-ba4eaee01b7b
	I0916 11:07:51.152499 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:51.152502 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:51.152505 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:51.152508 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:51.152621 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:51.153155 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:51.153165 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:51.153174 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:51.153179 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:51.155315 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:51.155410 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:51.155435 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:51 GMT
	I0916 11:07:51.155453 2175536 round_trippers.go:580]     Audit-Id: f7e627f0-47ce-48f3-8ecf-3c160e7f21a7
	I0916 11:07:51.155488 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:51.155506 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:51.155520 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:51.155535 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:51.155658 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:51.156056 2175536 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:07:51.650211 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:51.650235 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:51.650245 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:51.650251 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:51.652811 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:51.652838 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:51.652855 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:51.652861 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:51.652864 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:51.652867 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:51 GMT
	I0916 11:07:51.652870 2175536 round_trippers.go:580]     Audit-Id: 4f233f57-0150-4db0-8fcd-d43b8f3ab30e
	I0916 11:07:51.652872 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:51.653051 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:51.653695 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:51.653717 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:51.653726 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:51.653731 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:51.656052 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:51.656076 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:51.656084 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:51.656089 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:51 GMT
	I0916 11:07:51.656093 2175536 round_trippers.go:580]     Audit-Id: 496624f5-ef35-4df4-b702-5b48670bc1e6
	I0916 11:07:51.656096 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:51.656100 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:51.656103 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:51.656360 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:52.150059 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:52.150082 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:52.150092 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:52.150096 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:52.152544 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:52.152614 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:52.152637 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:52.152656 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:52.152670 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:52.152705 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:52.152720 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:52 GMT
	I0916 11:07:52.152737 2175536 round_trippers.go:580]     Audit-Id: 4567b755-eec3-4bc9-b5b1-fc9df1641b4c
	I0916 11:07:52.152907 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:52.153488 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:52.153504 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:52.153513 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:52.153538 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:52.155461 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:07:52.155499 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:52.155508 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:52.155512 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:52.155516 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:52.155519 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:52.155523 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:52 GMT
	I0916 11:07:52.155526 2175536 round_trippers.go:580]     Audit-Id: 0112f3dc-0499-48d5-acbf-e85824b0baa7
	I0916 11:07:52.155858 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:52.649461 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:52.649488 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:52.649498 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:52.649503 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:52.651861 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:52.651883 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:52.651892 2175536 round_trippers.go:580]     Audit-Id: 2c5e38e7-dcd0-447c-9199-acb54c5b7c54
	I0916 11:07:52.651897 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:52.651900 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:52.651905 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:52.651908 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:52.651910 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:52 GMT
	I0916 11:07:52.652224 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:52.652765 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:52.652783 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:52.652792 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:52.652796 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:52.654897 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:52.654916 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:52.654924 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:52.654928 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:52.654932 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:52.654935 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:52 GMT
	I0916 11:07:52.654939 2175536 round_trippers.go:580]     Audit-Id: 698426f6-3eb5-450d-b537-97fc1c1ed382
	I0916 11:07:52.654942 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:52.655432 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:53.149704 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:53.149726 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:53.149736 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:53.149740 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:53.152185 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:53.152209 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:53.152218 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:53 GMT
	I0916 11:07:53.152223 2175536 round_trippers.go:580]     Audit-Id: 8073137f-825d-4d82-9556-fce6c21a17de
	I0916 11:07:53.152228 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:53.152232 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:53.152234 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:53.152237 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:53.152560 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:53.153112 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:53.153131 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:53.153139 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:53.153145 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:53.155230 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:53.155251 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:53.155259 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:53.155263 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:53 GMT
	I0916 11:07:53.155268 2175536 round_trippers.go:580]     Audit-Id: 11f206f8-062c-405b-a400-76d3a3093c8a
	I0916 11:07:53.155271 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:53.155275 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:53.155281 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:53.155735 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:53.156105 2175536 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:07:53.649819 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:53.649851 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:53.649862 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:53.649866 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:53.652347 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:53.652372 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:53.652381 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:53.652385 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:53.652389 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:53.652392 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:53.652395 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:53 GMT
	I0916 11:07:53.652398 2175536 round_trippers.go:580]     Audit-Id: 89db465a-9a82-4dd1-a225-cae09326689e
	I0916 11:07:53.652750 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:53.653289 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:53.653309 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:53.653318 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:53.653323 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:53.657048 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:07:53.657070 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:53.657079 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:53 GMT
	I0916 11:07:53.657083 2175536 round_trippers.go:580]     Audit-Id: 6a0236e7-4987-453b-8cbd-92fcb09a53b1
	I0916 11:07:53.657086 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:53.657089 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:53.657092 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:53.657100 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:53.657469 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.150185 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:54.150218 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.150231 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.150238 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.152704 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.152725 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.152734 2175536 round_trippers.go:580]     Audit-Id: cbb9f7ff-944f-4751-85c3-e51dfcfaf20c
	I0916 11:07:54.152738 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.152741 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.152744 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.152747 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.152750 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.152888 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"456","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6480 chars]
	I0916 11:07:54.153440 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.153451 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.153459 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.153464 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.155559 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.155624 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.155634 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.155640 2175536 round_trippers.go:580]     Audit-Id: 8fbfb155-7139-4376-ae3f-f8b520c396fa
	I0916 11:07:54.155643 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.155646 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.155650 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.155654 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.155799 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.156182 2175536 pod_ready.go:93] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"True"
	I0916 11:07:54.156203 2175536 pod_ready.go:82] duration metric: took 14.006983415s for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.156216 2175536 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.156277 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-890146
	I0916 11:07:54.156286 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.156294 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.156298 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.158347 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.158368 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.158376 2175536 round_trippers.go:580]     Audit-Id: ef48c66a-e4aa-43a9-924a-2cc9cb3f8dec
	I0916 11:07:54.158379 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.158385 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.158388 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.158391 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.158394 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.158662 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-890146","namespace":"kube-system","uid":"59c960ab-f6dd-4ed3-856c-ba2a02295b12","resourceVersion":"327","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.58.2:2379","kubernetes.io/config.hash":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.mirror":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.seen":"2024-09-16T11:07:32.783608135Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-cl
ient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6440 chars]
	I0916 11:07:54.159182 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.159202 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.159211 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.159214 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.161577 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.161644 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.161666 2175536 round_trippers.go:580]     Audit-Id: 77bd7f3b-d8fb-49df-9145-f93ccbe0398c
	I0916 11:07:54.161686 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.161717 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.161746 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.161754 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.161758 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.161865 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.162334 2175536 pod_ready.go:93] pod "etcd-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:07:54.162354 2175536 pod_ready.go:82] duration metric: took 6.130488ms for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.162368 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.162447 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-890146
	I0916 11:07:54.162457 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.162467 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.162471 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.164797 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.164823 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.164832 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.164836 2175536 round_trippers.go:580]     Audit-Id: 45927122-940d-4178-940b-cef2ca51fe82
	I0916 11:07:54.164840 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.164845 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.164848 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.164851 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.165221 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-890146","namespace":"kube-system","uid":"9846229d-7227-453f-8c30-697aaba61648","resourceVersion":"432","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.58.2:8443","kubernetes.io/config.hash":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.mirror":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.seen":"2024-09-16T11:07:32.783610957Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kube
rnetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 8518 chars]
	I0916 11:07:54.165794 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.165812 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.165822 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.165829 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.167920 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.167944 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.167952 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.167956 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.167959 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.167962 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.167989 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.167998 2175536 round_trippers.go:580]     Audit-Id: 2f394629-cd1e-44f4-8b79-7f8dc8770370
	I0916 11:07:54.168099 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.168506 2175536 pod_ready.go:93] pod "kube-apiserver-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:07:54.168527 2175536 pod_ready.go:82] duration metric: took 6.14903ms for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.168539 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.168611 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-890146
	I0916 11:07:54.168621 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.168630 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.168634 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.170621 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:07:54.170640 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.170649 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.170655 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.170659 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.170663 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.170666 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.170669 2175536 round_trippers.go:580]     Audit-Id: d4ed699a-ea5d-4917-bcf1-e2df94795316
	I0916 11:07:54.170835 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-890146","namespace":"kube-system","uid":"421b15a5-a8e2-4631-bf18-1592c8747b09","resourceVersion":"436","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.mirror":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.seen":"2024-09-16T11:07:32.783594137Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8093 chars]
	I0916 11:07:54.171380 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.171390 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.171399 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.171403 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.173326 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:07:54.173382 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.173403 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.173423 2175536 round_trippers.go:580]     Audit-Id: 24f1a1f4-41fa-430f-9230-5c5410a1ec9e
	I0916 11:07:54.173459 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.173480 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.173496 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.173510 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.173647 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.174067 2175536 pod_ready.go:93] pod "kube-controller-manager-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:07:54.174087 2175536 pod_ready.go:82] duration metric: took 5.537457ms for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.174098 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.174178 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:07:54.174189 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.174199 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.174205 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.176508 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.176572 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.176596 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.176616 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.176649 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.176682 2175536 round_trippers.go:580]     Audit-Id: 6b5f980f-a01d-4fe8-a85a-7adf75f72f4c
	I0916 11:07:54.176691 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.176694 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.176823 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-fm5qr","generateName":"kube-proxy-","namespace":"kube-system","uid":"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73","resourceVersion":"412","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6175 chars]
	I0916 11:07:54.177370 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.177391 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.177402 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.177406 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.179591 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.179660 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.179683 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.179703 2175536 round_trippers.go:580]     Audit-Id: d3e95d93-af27-478e-81dc-a8c630bcfcb7
	I0916 11:07:54.179734 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.179759 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.179777 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.179795 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.180432 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.180849 2175536 pod_ready.go:93] pod "kube-proxy-fm5qr" in "kube-system" namespace has status "Ready":"True"
	I0916 11:07:54.180869 2175536 pod_ready.go:82] duration metric: took 6.764043ms for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.180897 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.351235 2175536 request.go:632] Waited for 170.265368ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:07:54.351323 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:07:54.351336 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.351345 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.351352 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.353696 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.353719 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.353727 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.353733 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.353737 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.353741 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.353745 2175536 round_trippers.go:580]     Audit-Id: f39103d4-6912-44e3-b8e3-9457f873d483
	I0916 11:07:54.353748 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.353996 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"438","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 4975 chars]
	I0916 11:07:54.550788 2175536 request.go:632] Waited for 196.346694ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.550915 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.550932 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.550942 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.550947 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.553266 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.553290 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.553299 2175536 round_trippers.go:580]     Audit-Id: cae5e5e1-ff1a-4e25-9a40-48f48898c14e
	I0916 11:07:54.553304 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.553307 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.553333 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.553343 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.553348 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.554047 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.554445 2175536 pod_ready.go:93] pod "kube-scheduler-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:07:54.554464 2175536 pod_ready.go:82] duration metric: took 373.554532ms for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.554473 2175536 pod_ready.go:39] duration metric: took 15.938452327s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:07:54.554491 2175536 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:07:54.554567 2175536 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:07:54.566420 2175536 command_runner.go:130] > 1444
	I0916 11:07:54.566454 2175536 api_server.go:72] duration metric: took 16.88282404s to wait for apiserver process to appear ...
	I0916 11:07:54.566465 2175536 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:07:54.566484 2175536 api_server.go:253] Checking apiserver healthz at https://192.168.58.2:8443/healthz ...
	I0916 11:07:54.574483 2175536 api_server.go:279] https://192.168.58.2:8443/healthz returned 200:
	ok
	I0916 11:07:54.574557 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/version
	I0916 11:07:54.574563 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.574572 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.574582 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.575633 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:07:54.575652 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.575660 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.575664 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.575666 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.575670 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.575673 2175536 round_trippers.go:580]     Content-Length: 263
	I0916 11:07:54.575676 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.575679 2175536 round_trippers.go:580]     Audit-Id: eced1287-bfee-4134-87fc-867965913dd1
	I0916 11:07:54.575694 2175536 request.go:1351] Response Body: {
	  "major": "1",
	  "minor": "31",
	  "gitVersion": "v1.31.1",
	  "gitCommit": "948afe5ca072329a73c8e79ed5938717a5cb3d21",
	  "gitTreeState": "clean",
	  "buildDate": "2024-09-11T21:22:08Z",
	  "goVersion": "go1.22.6",
	  "compiler": "gc",
	  "platform": "linux/arm64"
	}
	I0916 11:07:54.575787 2175536 api_server.go:141] control plane version: v1.31.1
	I0916 11:07:54.575806 2175536 api_server.go:131] duration metric: took 9.334729ms to wait for apiserver health ...
	I0916 11:07:54.575814 2175536 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:07:54.750271 2175536 request.go:632] Waited for 174.388924ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:07:54.750332 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:07:54.750350 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.750379 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.750384 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.753725 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:07:54.753751 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.753760 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.753764 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.753769 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.753773 2175536 round_trippers.go:580]     Audit-Id: 871bb79a-785f-4f33-b7d4-3763b7046d46
	I0916 11:07:54.753777 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.753780 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.755048 2175536 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"460"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"456","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 58808 chars]
	I0916 11:07:54.758249 2175536 system_pods.go:59] 8 kube-system pods found
	I0916 11:07:54.758283 2175536 system_pods.go:61] "coredns-7c65d6cfc9-vp22b" [a6adb735-448b-480b-aba1-3ce4d56c6fc7] Running
	I0916 11:07:54.758290 2175536 system_pods.go:61] "etcd-multinode-890146" [59c960ab-f6dd-4ed3-856c-ba2a02295b12] Running
	I0916 11:07:54.758296 2175536 system_pods.go:61] "kindnet-dbrhk" [24ef6be7-a1ab-41f7-83c8-aa5af5007281] Running
	I0916 11:07:54.758301 2175536 system_pods.go:61] "kube-apiserver-multinode-890146" [9846229d-7227-453f-8c30-697aaba61648] Running
	I0916 11:07:54.758322 2175536 system_pods.go:61] "kube-controller-manager-multinode-890146" [421b15a5-a8e2-4631-bf18-1592c8747b09] Running
	I0916 11:07:54.758327 2175536 system_pods.go:61] "kube-proxy-fm5qr" [8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73] Running
	I0916 11:07:54.758332 2175536 system_pods.go:61] "kube-scheduler-multinode-890146" [6d440ef3-2e6e-4db9-8c28-2417f7a80c9f] Running
	I0916 11:07:54.758343 2175536 system_pods.go:61] "storage-provisioner" [97795413-5c7a-480b-9cbd-18d4dea5669b] Running
	I0916 11:07:54.758349 2175536 system_pods.go:74] duration metric: took 182.527097ms to wait for pod list to return data ...
	I0916 11:07:54.758362 2175536 default_sa.go:34] waiting for default service account to be created ...
	I0916 11:07:54.950271 2175536 request.go:632] Waited for 191.819086ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:07:54.950341 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:07:54.950351 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.950360 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.950367 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.953007 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.953036 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.953045 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.953049 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.953053 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.953058 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.953061 2175536 round_trippers.go:580]     Content-Length: 261
	I0916 11:07:54.953064 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.953067 2175536 round_trippers.go:580]     Audit-Id: 4c463ba2-1ab4-4e04-94c3-b9d58a2613c4
	I0916 11:07:54.953088 2175536 request.go:1351] Response Body: {"kind":"ServiceAccountList","apiVersion":"v1","metadata":{"resourceVersion":"460"},"items":[{"metadata":{"name":"default","namespace":"default","uid":"05c19a8a-7c83-4ce8-b18d-3bc9431ca644","resourceVersion":"353","creationTimestamp":"2024-09-16T11:07:37Z"}}]}
	I0916 11:07:54.953273 2175536 default_sa.go:45] found service account: "default"
	I0916 11:07:54.953296 2175536 default_sa.go:55] duration metric: took 194.927755ms for default service account to be created ...
	I0916 11:07:54.953308 2175536 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 11:07:55.150590 2175536 request.go:632] Waited for 197.212256ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:07:55.150669 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:07:55.150715 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:55.150726 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:55.150735 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:55.153817 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:07:55.153913 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:55.153931 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:55.153937 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:55.153942 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:55.153946 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:55 GMT
	I0916 11:07:55.153951 2175536 round_trippers.go:580]     Audit-Id: 746e87a7-e88f-44c6-b990-79d69fb2aba3
	I0916 11:07:55.153955 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:55.154362 2175536 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"460"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"456","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 58808 chars]
	I0916 11:07:55.156980 2175536 system_pods.go:86] 8 kube-system pods found
	I0916 11:07:55.157012 2175536 system_pods.go:89] "coredns-7c65d6cfc9-vp22b" [a6adb735-448b-480b-aba1-3ce4d56c6fc7] Running
	I0916 11:07:55.157020 2175536 system_pods.go:89] "etcd-multinode-890146" [59c960ab-f6dd-4ed3-856c-ba2a02295b12] Running
	I0916 11:07:55.157026 2175536 system_pods.go:89] "kindnet-dbrhk" [24ef6be7-a1ab-41f7-83c8-aa5af5007281] Running
	I0916 11:07:55.157032 2175536 system_pods.go:89] "kube-apiserver-multinode-890146" [9846229d-7227-453f-8c30-697aaba61648] Running
	I0916 11:07:55.157038 2175536 system_pods.go:89] "kube-controller-manager-multinode-890146" [421b15a5-a8e2-4631-bf18-1592c8747b09] Running
	I0916 11:07:55.157043 2175536 system_pods.go:89] "kube-proxy-fm5qr" [8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73] Running
	I0916 11:07:55.157047 2175536 system_pods.go:89] "kube-scheduler-multinode-890146" [6d440ef3-2e6e-4db9-8c28-2417f7a80c9f] Running
	I0916 11:07:55.157051 2175536 system_pods.go:89] "storage-provisioner" [97795413-5c7a-480b-9cbd-18d4dea5669b] Running
	I0916 11:07:55.157098 2175536 system_pods.go:126] duration metric: took 203.779648ms to wait for k8s-apps to be running ...
	I0916 11:07:55.157115 2175536 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:07:55.157190 2175536 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:07:55.170989 2175536 system_svc.go:56] duration metric: took 13.865389ms WaitForService to wait for kubelet
	I0916 11:07:55.171018 2175536 kubeadm.go:582] duration metric: took 17.487386673s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:07:55.171038 2175536 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:07:55.350348 2175536 request.go:632] Waited for 179.235487ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes
	I0916 11:07:55.350407 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes
	I0916 11:07:55.350413 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:55.350422 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:55.350428 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:55.353203 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:55.353224 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:55.353232 2175536 round_trippers.go:580]     Audit-Id: 6f008049-ddcf-4dcc-9e47-1eece76d2011
	I0916 11:07:55.353236 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:55.353240 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:55.353244 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:55.353262 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:55.353266 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:55 GMT
	I0916 11:07:55.353396 2175536 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"460"},"items":[{"metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"manag
edFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1", [truncated 5156 chars]
	I0916 11:07:55.353893 2175536 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:07:55.353919 2175536 node_conditions.go:123] node cpu capacity is 2
	I0916 11:07:55.353930 2175536 node_conditions.go:105] duration metric: took 182.886291ms to run NodePressure ...
	I0916 11:07:55.353943 2175536 start.go:241] waiting for startup goroutines ...
	I0916 11:07:55.353950 2175536 start.go:246] waiting for cluster config update ...
	I0916 11:07:55.353965 2175536 start.go:255] writing updated cluster config ...
	I0916 11:07:55.356664 2175536 out.go:201] 
	I0916 11:07:55.358753 2175536 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:07:55.358847 2175536 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:07:55.361296 2175536 out.go:177] * Starting "multinode-890146-m02" worker node in "multinode-890146" cluster
	I0916 11:07:55.363441 2175536 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 11:07:55.365739 2175536 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:07:55.368296 2175536 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:07:55.368335 2175536 cache.go:56] Caching tarball of preloaded images
	I0916 11:07:55.368385 2175536 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:07:55.368448 2175536 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 11:07:55.368465 2175536 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 11:07:55.368555 2175536 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	W0916 11:07:55.387660 2175536 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:07:55.387681 2175536 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:07:55.387779 2175536 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:07:55.387802 2175536 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:07:55.387807 2175536 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:07:55.387815 2175536 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:07:55.387820 2175536 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:07:55.388995 2175536 image.go:273] response: 
	I0916 11:07:55.502327 2175536 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:07:55.502368 2175536 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:07:55.502398 2175536 start.go:360] acquireMachinesLock for multinode-890146-m02: {Name:mkb193e5e8454b4e97e0a3d9e40e1ee2de147629 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:07:55.502517 2175536 start.go:364] duration metric: took 98.018µs to acquireMachinesLock for "multinode-890146-m02"
	I0916 11:07:55.502548 2175536 start.go:93] Provisioning new machine with config: &{Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount
9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}
	I0916 11:07:55.502637 2175536 start.go:125] createHost starting for "m02" (driver="docker")
	I0916 11:07:55.506447 2175536 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 11:07:55.506610 2175536 start.go:159] libmachine.API.Create for "multinode-890146" (driver="docker")
	I0916 11:07:55.506651 2175536 client.go:168] LocalClient.Create starting
	I0916 11:07:55.506780 2175536 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 11:07:55.506830 2175536 main.go:141] libmachine: Decoding PEM data...
	I0916 11:07:55.506849 2175536 main.go:141] libmachine: Parsing certificate...
	I0916 11:07:55.506914 2175536 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 11:07:55.506936 2175536 main.go:141] libmachine: Decoding PEM data...
	I0916 11:07:55.506954 2175536 main.go:141] libmachine: Parsing certificate...
	I0916 11:07:55.507280 2175536 cli_runner.go:164] Run: docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:07:55.521699 2175536 network_create.go:77] Found existing network {name:multinode-890146 subnet:0x40012521e0 gateway:[0 0 0 0 0 0 0 0 0 0 255 255 192 168 58 1] mtu:1500}
	I0916 11:07:55.521747 2175536 kic.go:121] calculated static IP "192.168.58.3" for the "multinode-890146-m02" container
	I0916 11:07:55.521818 2175536 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:07:55.537755 2175536 cli_runner.go:164] Run: docker volume create multinode-890146-m02 --label name.minikube.sigs.k8s.io=multinode-890146-m02 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:07:55.554187 2175536 oci.go:103] Successfully created a docker volume multinode-890146-m02
	I0916 11:07:55.554273 2175536 cli_runner.go:164] Run: docker run --rm --name multinode-890146-m02-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-890146-m02 --entrypoint /usr/bin/test -v multinode-890146-m02:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 11:07:56.125492 2175536 oci.go:107] Successfully prepared a docker volume multinode-890146-m02
	I0916 11:07:56.125538 2175536 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:07:56.125558 2175536 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 11:07:56.125628 2175536 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v multinode-890146-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 11:08:00.302138 2175536 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v multinode-890146-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.176450501s)
	I0916 11:08:00.302172 2175536 kic.go:203] duration metric: took 4.176608489s to extract preloaded images to volume ...
	W0916 11:08:00.302330 2175536 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 11:08:00.302447 2175536 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 11:08:00.444886 2175536 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname multinode-890146-m02 --name multinode-890146-m02 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-890146-m02 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=multinode-890146-m02 --network multinode-890146 --ip 192.168.58.3 --volume multinode-890146-m02:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 11:08:00.843116 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Running}}
	I0916 11:08:00.867003 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Status}}
	I0916 11:08:00.894286 2175536 cli_runner.go:164] Run: docker exec multinode-890146-m02 stat /var/lib/dpkg/alternatives/iptables
	I0916 11:08:00.951628 2175536 oci.go:144] the created container "multinode-890146-m02" has a running status.
	I0916 11:08:00.951665 2175536 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa...
	I0916 11:08:01.178266 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 11:08:01.178358 2175536 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 11:08:01.208797 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Status}}
	I0916 11:08:01.235936 2175536 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 11:08:01.235957 2175536 kic_runner.go:114] Args: [docker exec --privileged multinode-890146-m02 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 11:08:01.324883 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Status}}
	I0916 11:08:01.350434 2175536 machine.go:93] provisionDockerMachine start ...
	I0916 11:08:01.350531 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:01.384040 2175536 main.go:141] libmachine: Using SSH client type: native
	I0916 11:08:01.384327 2175536 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40722 <nil> <nil>}
	I0916 11:08:01.384336 2175536 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:08:01.385217 2175536 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:56210->127.0.0.1:40722: read: connection reset by peer
	I0916 11:08:04.522325 2175536 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146-m02
	
	I0916 11:08:04.522351 2175536 ubuntu.go:169] provisioning hostname "multinode-890146-m02"
	I0916 11:08:04.522426 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:04.539702 2175536 main.go:141] libmachine: Using SSH client type: native
	I0916 11:08:04.539946 2175536 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40722 <nil> <nil>}
	I0916 11:08:04.539965 2175536 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-890146-m02 && echo "multinode-890146-m02" | sudo tee /etc/hostname
	I0916 11:08:04.691976 2175536 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146-m02
	
	I0916 11:08:04.692065 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:04.713951 2175536 main.go:141] libmachine: Using SSH client type: native
	I0916 11:08:04.714195 2175536 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40722 <nil> <nil>}
	I0916 11:08:04.714213 2175536 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-890146-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-890146-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-890146-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:08:04.850953 2175536 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:08:04.850990 2175536 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 11:08:04.851006 2175536 ubuntu.go:177] setting up certificates
	I0916 11:08:04.851017 2175536 provision.go:84] configureAuth start
	I0916 11:08:04.851079 2175536 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m02
	I0916 11:08:04.869185 2175536 provision.go:143] copyHostCerts
	I0916 11:08:04.869232 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:08:04.869267 2175536 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 11:08:04.869279 2175536 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:08:04.869360 2175536 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 11:08:04.869446 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:08:04.869468 2175536 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 11:08:04.869473 2175536 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:08:04.869500 2175536 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 11:08:04.869544 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:08:04.869566 2175536 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 11:08:04.869575 2175536 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:08:04.869602 2175536 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 11:08:04.869656 2175536 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.multinode-890146-m02 san=[127.0.0.1 192.168.58.3 localhost minikube multinode-890146-m02]
	I0916 11:08:05.328262 2175536 provision.go:177] copyRemoteCerts
	I0916 11:08:05.328334 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:08:05.328379 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:05.345782 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40722 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:08:05.448131 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:08:05.448198 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1229 bytes)
	I0916 11:08:05.473486 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:08:05.473555 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 11:08:05.499338 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:08:05.499407 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 11:08:05.525060 2175536 provision.go:87] duration metric: took 674.028955ms to configureAuth
	I0916 11:08:05.525086 2175536 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:08:05.525287 2175536 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:08:05.525301 2175536 machine.go:96] duration metric: took 4.174844267s to provisionDockerMachine
	I0916 11:08:05.525311 2175536 client.go:171] duration metric: took 10.018650112s to LocalClient.Create
	I0916 11:08:05.525333 2175536 start.go:167] duration metric: took 10.018724344s to libmachine.API.Create "multinode-890146"
	I0916 11:08:05.525345 2175536 start.go:293] postStartSetup for "multinode-890146-m02" (driver="docker")
	I0916 11:08:05.525355 2175536 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:08:05.525436 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:08:05.525481 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:05.542154 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40722 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:08:05.647159 2175536 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:08:05.651393 2175536 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:08:05.651414 2175536 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:08:05.651429 2175536 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:08:05.651434 2175536 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:08:05.651439 2175536 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:08:05.651443 2175536 command_runner.go:130] > ID=ubuntu
	I0916 11:08:05.651447 2175536 command_runner.go:130] > ID_LIKE=debian
	I0916 11:08:05.651452 2175536 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:08:05.651460 2175536 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:08:05.651472 2175536 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:08:05.651480 2175536 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:08:05.651487 2175536 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:08:05.651561 2175536 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:08:05.651593 2175536 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:08:05.651607 2175536 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:08:05.651620 2175536 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:08:05.651636 2175536 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 11:08:05.651707 2175536 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 11:08:05.651805 2175536 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 11:08:05.651813 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 11:08:05.651923 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:08:05.661612 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:08:05.687361 2175536 start.go:296] duration metric: took 162.000013ms for postStartSetup
	I0916 11:08:05.687789 2175536 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m02
	I0916 11:08:05.704259 2175536 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:08:05.704570 2175536 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:08:05.704624 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:05.720743 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40722 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:08:05.815588 2175536 command_runner.go:130] > 21%
	I0916 11:08:05.815671 2175536 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:08:05.819885 2175536 command_runner.go:130] > 154G
	I0916 11:08:05.820327 2175536 start.go:128] duration metric: took 10.317676147s to createHost
	I0916 11:08:05.820349 2175536 start.go:83] releasing machines lock for "multinode-890146-m02", held for 10.31781844s
	I0916 11:08:05.820424 2175536 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m02
	I0916 11:08:05.840029 2175536 out.go:177] * Found network options:
	I0916 11:08:05.842151 2175536 out.go:177]   - NO_PROXY=192.168.58.2
	W0916 11:08:05.844425 2175536 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:08:05.844485 2175536 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 11:08:05.844565 2175536 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:08:05.844613 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:05.844891 2175536 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:08:05.844951 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:05.865699 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40722 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:08:05.867089 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40722 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:08:05.959825 2175536 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 11:08:05.959899 2175536 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:08:05.959928 2175536 command_runner.go:130] > Device: f4h/244d	Inode: 1301117     Links: 1
	I0916 11:08:05.959946 2175536 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:08:05.959975 2175536 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:08:05.960021 2175536 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:08:05.960051 2175536 command_runner.go:130] > Change: 2024-09-16 10:29:56.970000846 +0000
	I0916 11:08:05.960070 2175536 command_runner.go:130] >  Birth: 2024-09-16 10:29:56.970000846 +0000
	I0916 11:08:05.960447 2175536 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 11:08:06.090370 2175536 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:08:06.093948 2175536 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:08:06.094092 2175536 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:08:06.125402 2175536 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf, 
	I0916 11:08:06.125428 2175536 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 11:08:06.125436 2175536 start.go:495] detecting cgroup driver to use...
	I0916 11:08:06.125470 2175536 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:08:06.125523 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 11:08:06.138592 2175536 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 11:08:06.151214 2175536 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:08:06.151287 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:08:06.165633 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:08:06.181163 2175536 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:08:06.278134 2175536 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:08:06.383302 2175536 command_runner.go:130] ! Created symlink /etc/systemd/system/cri-docker.service → /dev/null.
	I0916 11:08:06.383384 2175536 docker.go:233] disabling docker service ...
	I0916 11:08:06.383460 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:08:06.406049 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:08:06.417868 2175536 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:08:06.511389 2175536 command_runner.go:130] ! Removed /etc/systemd/system/sockets.target.wants/docker.socket.
	I0916 11:08:06.511469 2175536 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:08:06.608939 2175536 command_runner.go:130] ! Created symlink /etc/systemd/system/docker.service → /dev/null.
	I0916 11:08:06.609020 2175536 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:08:06.621543 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:08:06.640183 2175536 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0916 11:08:06.644666 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 11:08:06.656578 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 11:08:06.668824 2175536 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 11:08:06.668921 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 11:08:06.681467 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:08:06.695397 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 11:08:06.708191 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:08:06.719433 2175536 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:08:06.729479 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 11:08:06.740279 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 11:08:06.750664 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 11:08:06.763254 2175536 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:08:06.771014 2175536 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:08:06.772406 2175536 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:08:06.781503 2175536 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:08:06.874948 2175536 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 11:08:07.020548 2175536 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 11:08:07.020707 2175536 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 11:08:07.024371 2175536 command_runner.go:130] >   File: /run/containerd/containerd.sock
	I0916 11:08:07.024440 2175536 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:08:07.024460 2175536 command_runner.go:130] > Device: fdh/253d	Inode: 175         Links: 1
	I0916 11:08:07.024481 2175536 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:08:07.024512 2175536 command_runner.go:130] > Access: 2024-09-16 11:08:06.964092055 +0000
	I0916 11:08:07.024547 2175536 command_runner.go:130] > Modify: 2024-09-16 11:08:06.964092055 +0000
	I0916 11:08:07.024575 2175536 command_runner.go:130] > Change: 2024-09-16 11:08:06.964092055 +0000
	I0916 11:08:07.024593 2175536 command_runner.go:130] >  Birth: -
	I0916 11:08:07.024862 2175536 start.go:563] Will wait 60s for crictl version
	I0916 11:08:07.024953 2175536 ssh_runner.go:195] Run: which crictl
	I0916 11:08:07.028218 2175536 command_runner.go:130] > /usr/bin/crictl
	I0916 11:08:07.028574 2175536 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:08:07.071564 2175536 command_runner.go:130] > Version:  0.1.0
	I0916 11:08:07.071636 2175536 command_runner.go:130] > RuntimeName:  containerd
	I0916 11:08:07.071655 2175536 command_runner.go:130] > RuntimeVersion:  1.7.22
	I0916 11:08:07.071673 2175536 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:08:07.074372 2175536 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 11:08:07.074497 2175536 ssh_runner.go:195] Run: containerd --version
	I0916 11:08:07.097053 2175536 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:08:07.097148 2175536 ssh_runner.go:195] Run: containerd --version
	I0916 11:08:07.119375 2175536 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:08:07.125432 2175536 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 11:08:07.127244 2175536 out.go:177]   - env NO_PROXY=192.168.58.2
	I0916 11:08:07.129462 2175536 cli_runner.go:164] Run: docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:08:07.145671 2175536 ssh_runner.go:195] Run: grep 192.168.58.1	host.minikube.internal$ /etc/hosts
	I0916 11:08:07.149762 2175536 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.58.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:08:07.161040 2175536 mustload.go:65] Loading cluster: multinode-890146
	I0916 11:08:07.161250 2175536 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:08:07.161520 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:08:07.179177 2175536 host.go:66] Checking if "multinode-890146" exists ...
	I0916 11:08:07.179488 2175536 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146 for IP: 192.168.58.3
	I0916 11:08:07.179502 2175536 certs.go:194] generating shared ca certs ...
	I0916 11:08:07.179518 2175536 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:08:07.179847 2175536 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 11:08:07.182772 2175536 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 11:08:07.182805 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:08:07.182852 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:08:07.182868 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:08:07.182880 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:08:07.182940 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 11:08:07.182980 2175536 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 11:08:07.182989 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:08:07.183013 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 11:08:07.183034 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:08:07.183055 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 11:08:07.183101 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:08:07.183128 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 11:08:07.183141 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:08:07.183152 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 11:08:07.183170 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:08:07.209946 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 11:08:07.235360 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:08:07.260765 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 11:08:07.285760 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 11:08:07.310087 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:08:07.335622 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 11:08:07.361368 2175536 ssh_runner.go:195] Run: openssl version
	I0916 11:08:07.366767 2175536 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:08:07.367237 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:08:07.376822 2175536 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:08:07.380555 2175536 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:08:07.380600 2175536 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:08:07.380650 2175536 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:08:07.389700 2175536 command_runner.go:130] > b5213941
	I0916 11:08:07.390146 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:08:07.399530 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 11:08:07.408966 2175536 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 11:08:07.412600 2175536 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:08:07.412639 2175536 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:08:07.412695 2175536 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 11:08:07.419633 2175536 command_runner.go:130] > 51391683
	I0916 11:08:07.420189 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 11:08:07.430005 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 11:08:07.439393 2175536 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 11:08:07.442833 2175536 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:08:07.442881 2175536 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:08:07.442961 2175536 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 11:08:07.449629 2175536 command_runner.go:130] > 3ec20f2e
	I0916 11:08:07.450113 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:08:07.459786 2175536 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:08:07.463161 2175536 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:08:07.463195 2175536 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:08:07.463250 2175536 kubeadm.go:934] updating node {m02 192.168.58.3 8443 v1.31.1 containerd false true} ...
	I0916 11:08:07.463350 2175536 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-890146-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.58.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:08:07.463424 2175536 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:08:07.471270 2175536 command_runner.go:130] > kubeadm
	I0916 11:08:07.471289 2175536 command_runner.go:130] > kubectl
	I0916 11:08:07.471294 2175536 command_runner.go:130] > kubelet
	I0916 11:08:07.472417 2175536 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:08:07.472512 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 11:08:07.481280 2175536 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (324 bytes)
	I0916 11:08:07.500033 2175536 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:08:07.518293 2175536 ssh_runner.go:195] Run: grep 192.168.58.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:08:07.521622 2175536 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.58.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:08:07.532421 2175536 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:08:07.616946 2175536 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:08:07.632778 2175536 host.go:66] Checking if "multinode-890146" exists ...
	I0916 11:08:07.633087 2175536 start.go:317] joinCluster: &{Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerN
ames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p
2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:08:07.633181 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0916 11:08:07.633232 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:08:07.655231 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:08:07.813899 2175536 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token ptf107.9si9q29neobz8pd1 --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 11:08:07.813944 2175536 start.go:343] trying to join worker node "m02" to cluster: &{Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}
	I0916 11:08:07.813977 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token ptf107.9si9q29neobz8pd1 --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=multinode-890146-m02"
	I0916 11:08:07.856516 2175536 command_runner.go:130] > [preflight] Running pre-flight checks
	I0916 11:08:07.867055 2175536 command_runner.go:130] > [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:08:07.867081 2175536 command_runner.go:130] > KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:08:07.867087 2175536 command_runner.go:130] > OS: Linux
	I0916 11:08:07.867094 2175536 command_runner.go:130] > CGROUPS_CPU: enabled
	I0916 11:08:07.867100 2175536 command_runner.go:130] > CGROUPS_CPUACCT: enabled
	I0916 11:08:07.867106 2175536 command_runner.go:130] > CGROUPS_CPUSET: enabled
	I0916 11:08:07.867112 2175536 command_runner.go:130] > CGROUPS_DEVICES: enabled
	I0916 11:08:07.867125 2175536 command_runner.go:130] > CGROUPS_FREEZER: enabled
	I0916 11:08:07.867133 2175536 command_runner.go:130] > CGROUPS_MEMORY: enabled
	I0916 11:08:07.867139 2175536 command_runner.go:130] > CGROUPS_PIDS: enabled
	I0916 11:08:07.867147 2175536 command_runner.go:130] > CGROUPS_HUGETLB: enabled
	I0916 11:08:07.867153 2175536 command_runner.go:130] > CGROUPS_BLKIO: enabled
	I0916 11:08:07.961931 2175536 command_runner.go:130] > [preflight] Reading configuration from the cluster...
	I0916 11:08:07.961957 2175536 command_runner.go:130] > [preflight] FYI: You can look at this config file with 'kubectl -n kube-system get cm kubeadm-config -o yaml'
	I0916 11:08:08.004665 2175536 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:08:08.005488 2175536 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:08:08.005517 2175536 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0916 11:08:08.108491 2175536 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:08:09.610208 2175536 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.501897178s
	I0916 11:08:09.610236 2175536 command_runner.go:130] > [kubelet-start] Waiting for the kubelet to perform the TLS Bootstrap
	I0916 11:08:09.634985 2175536 command_runner.go:130] > This node has joined the cluster:
	I0916 11:08:09.635006 2175536 command_runner.go:130] > * Certificate signing request was sent to apiserver and a response was received.
	I0916 11:08:09.635013 2175536 command_runner.go:130] > * The Kubelet was informed of the new secure connection details.
	I0916 11:08:09.635020 2175536 command_runner.go:130] > Run 'kubectl get nodes' on the control-plane to see this node join the cluster.
	I0916 11:08:09.638528 2175536 command_runner.go:130] ! 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:08:09.638559 2175536 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:08:09.638576 2175536 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token ptf107.9si9q29neobz8pd1 --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=multinode-890146-m02": (1.824584701s)
	I0916 11:08:09.638594 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0916 11:08:09.826606 2175536 command_runner.go:130] ! Created symlink /etc/systemd/system/multi-user.target.wants/kubelet.service → /lib/systemd/system/kubelet.service.
	I0916 11:08:09.826836 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-890146-m02 minikube.k8s.io/updated_at=2024_09_16T11_08_09_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=multinode-890146 minikube.k8s.io/primary=false
	I0916 11:08:09.930074 2175536 command_runner.go:130] > node/multinode-890146-m02 labeled
	I0916 11:08:09.934376 2175536 start.go:319] duration metric: took 2.301283567s to joinCluster
	I0916 11:08:09.934475 2175536 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}
	I0916 11:08:09.934886 2175536 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:08:09.936448 2175536 out.go:177] * Verifying Kubernetes components...
	I0916 11:08:09.938297 2175536 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:08:10.046015 2175536 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:08:10.061308 2175536 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:08:10.061600 2175536 kapi.go:59] client config for multinode-890146: &rest.Config{Host:"https://192.168.58.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:08:10.061895 2175536 node_ready.go:35] waiting up to 6m0s for node "multinode-890146-m02" to be "Ready" ...
	I0916 11:08:10.061985 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:10.061997 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.062006 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.062012 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.064827 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.064861 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.064870 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.064875 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.064879 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.064883 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.064886 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.064889 2175536 round_trippers.go:580]     Audit-Id: b1ad4d49-0b33-40c0-acd8-87c788f26df9
	I0916 11:08:10.065007 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"499","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4486 chars]
	I0916 11:08:10.562184 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:10.562211 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.562240 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.562245 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.564636 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.564661 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.564669 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.564673 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.564678 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.564681 2175536 round_trippers.go:580]     Audit-Id: 1cf414cc-b1f3-4920-a9dc-a885ace12ed0
	I0916 11:08:10.564684 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.564689 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.564815 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"503","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4891 chars]
	I0916 11:08:10.565187 2175536 node_ready.go:49] node "multinode-890146-m02" has status "Ready":"True"
	I0916 11:08:10.565201 2175536 node_ready.go:38] duration metric: took 503.286116ms for node "multinode-890146-m02" to be "Ready" ...
	I0916 11:08:10.565210 2175536 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:08:10.565273 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:08:10.565279 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.565287 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.565291 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.568523 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:08:10.568590 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.568624 2175536 round_trippers.go:580]     Audit-Id: 6a0b9f9e-22ad-4a40-83aa-450cc457cb96
	I0916 11:08:10.568635 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.568669 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.568680 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.568683 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.568686 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.569951 2175536 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"503"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"456","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 69157 chars]
	I0916 11:08:10.573175 2175536 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.573304 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:08:10.573318 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.573327 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.573333 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.576950 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:08:10.576973 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.576982 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.576987 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.576990 2175536 round_trippers.go:580]     Audit-Id: 03285052-af0c-43bf-b0c2-a3a2a6c9acdc
	I0916 11:08:10.576993 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.576996 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.577000 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.577117 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"456","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6480 chars]
	I0916 11:08:10.577682 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:10.577693 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.577701 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.577705 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.580094 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.580157 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.580183 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.580203 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.580214 2175536 round_trippers.go:580]     Audit-Id: 0ddfb439-4011-4e4c-823c-4e38896653d8
	I0916 11:08:10.580218 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.580224 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.580227 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.580393 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:08:10.580772 2175536 pod_ready.go:93] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:10.580794 2175536 pod_ready.go:82] duration metric: took 7.572236ms for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.580806 2175536 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.580874 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-890146
	I0916 11:08:10.580885 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.580893 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.580899 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.583233 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.583254 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.583262 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.583269 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.583279 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.583282 2175536 round_trippers.go:580]     Audit-Id: 9fe472de-f4b3-4a16-8cec-5efd0c165a4c
	I0916 11:08:10.583285 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.583287 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.583459 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-890146","namespace":"kube-system","uid":"59c960ab-f6dd-4ed3-856c-ba2a02295b12","resourceVersion":"327","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.58.2:2379","kubernetes.io/config.hash":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.mirror":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.seen":"2024-09-16T11:07:32.783608135Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-cl
ient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6440 chars]
	I0916 11:08:10.583987 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:10.584007 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.584017 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.584022 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.586155 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.586178 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.586186 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.586190 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.586192 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.586197 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.586201 2175536 round_trippers.go:580]     Audit-Id: b682bbbd-8dd8-4a62-b11c-cc6e100901c7
	I0916 11:08:10.586203 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.586399 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:08:10.586855 2175536 pod_ready.go:93] pod "etcd-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:10.586883 2175536 pod_ready.go:82] duration metric: took 6.065667ms for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.586910 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.586984 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-890146
	I0916 11:08:10.586996 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.587005 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.587009 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.589233 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.589254 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.589262 2175536 round_trippers.go:580]     Audit-Id: 0144dd60-5378-4178-acb9-4c51669f48d4
	I0916 11:08:10.589265 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.589268 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.589270 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.589273 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.589276 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.589505 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-890146","namespace":"kube-system","uid":"9846229d-7227-453f-8c30-697aaba61648","resourceVersion":"432","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.58.2:8443","kubernetes.io/config.hash":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.mirror":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.seen":"2024-09-16T11:07:32.783610957Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kube
rnetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 8518 chars]
	I0916 11:08:10.590104 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:10.590123 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.590132 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.590137 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.592238 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.592274 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.592283 2175536 round_trippers.go:580]     Audit-Id: 89508ee3-1f81-43d8-bf7c-0da4e90c1ac9
	I0916 11:08:10.592288 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.592293 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.592297 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.592300 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.592303 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.592688 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:08:10.593085 2175536 pod_ready.go:93] pod "kube-apiserver-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:10.593105 2175536 pod_ready.go:82] duration metric: took 6.187685ms for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.593116 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.593229 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-890146
	I0916 11:08:10.593242 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.593251 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.593255 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.595341 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.595363 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.595371 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.595374 2175536 round_trippers.go:580]     Audit-Id: c219233d-4a00-41ab-9483-14f0fc685b13
	I0916 11:08:10.595377 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.595382 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.595384 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.595387 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.595647 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-890146","namespace":"kube-system","uid":"421b15a5-a8e2-4631-bf18-1592c8747b09","resourceVersion":"436","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.mirror":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.seen":"2024-09-16T11:07:32.783594137Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8093 chars]
	I0916 11:08:10.596184 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:10.596203 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.596211 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.596214 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.598211 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:08:10.598232 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.598240 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.598245 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.598248 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.598252 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.598257 2175536 round_trippers.go:580]     Audit-Id: 3b6b0777-42b3-4f85-af69-59a9bc2435d2
	I0916 11:08:10.598260 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.598565 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:08:10.598988 2175536 pod_ready.go:93] pod "kube-controller-manager-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:10.599008 2175536 pod_ready.go:82] duration metric: took 5.878714ms for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.599020 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.762322 2175536 request.go:632] Waited for 163.220346ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:10.762393 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:10.762403 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.762412 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.762419 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.764731 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.764804 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.764827 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.764846 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.764860 2175536 round_trippers.go:580]     Audit-Id: 7e4ac130-437c-445a-9474-91da24dc70ab
	I0916 11:08:10.764899 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.764916 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.764941 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.765049 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"492","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 4018 chars]
	I0916 11:08:10.962847 2175536 request.go:632] Waited for 197.357703ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:10.962915 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:10.962925 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.962944 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.962959 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.965292 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.965318 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.965327 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.965333 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.965338 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.965342 2175536 round_trippers.go:580]     Audit-Id: ea137220-6640-48b8-8a78-ef9c9366d0b3
	I0916 11:08:10.965353 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.965356 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.965661 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"503","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4891 chars]
	I0916 11:08:11.162821 2175536 request.go:632] Waited for 63.218207ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:11.162887 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:11.162897 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:11.162906 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:11.162914 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:11.171386 2175536 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 11:08:11.171415 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:11.171424 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:11.171429 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:11.171432 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:11.171436 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:11.171440 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:11 GMT
	I0916 11:08:11.171442 2175536 round_trippers.go:580]     Audit-Id: e29516a2-3d43-431c-9ab7-d2f8bb7c2f57
	I0916 11:08:11.171940 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"492","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 4018 chars]
	I0916 11:08:11.362795 2175536 request.go:632] Waited for 190.415339ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:11.362944 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:11.362975 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:11.362998 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:11.363012 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:11.365404 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:11.365430 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:11.365445 2175536 round_trippers.go:580]     Audit-Id: 982f9db0-7d1b-432d-87f9-5b66bee9fce9
	I0916 11:08:11.365452 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:11.365455 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:11.365458 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:11.365461 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:11.365469 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:11 GMT
	I0916 11:08:11.365777 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"503","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4891 chars]
	I0916 11:08:11.600294 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:11.600413 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:11.600450 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:11.600506 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:11.604249 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:08:11.604290 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:11.604299 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:11.604303 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:11.604306 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:11.604311 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:11 GMT
	I0916 11:08:11.604314 2175536 round_trippers.go:580]     Audit-Id: 5b13aca1-e689-4ed6-a98d-6ed21a617fd1
	I0916 11:08:11.604317 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:11.604910 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"505","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6209 chars]
	I0916 11:08:11.762785 2175536 request.go:632] Waited for 157.323121ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:11.762931 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:11.762950 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:11.762959 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:11.762966 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:11.767332 2175536 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:08:11.767401 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:11.767423 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:11.767440 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:11 GMT
	I0916 11:08:11.767456 2175536 round_trippers.go:580]     Audit-Id: b06c9cbf-8916-4564-b607-b4b63aa09b76
	I0916 11:08:11.767481 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:11.767501 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:11.767516 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:11.767699 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"503","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4891 chars]
	I0916 11:08:12.099720 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:12.099748 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:12.099758 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:12.099762 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:12.102443 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:12.102470 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:12.102483 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:12.102488 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:12.102491 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:12.102495 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:12 GMT
	I0916 11:08:12.102498 2175536 round_trippers.go:580]     Audit-Id: c5cab472-8457-492b-98fb-f0936be34abe
	I0916 11:08:12.102500 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:12.102796 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"505","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6209 chars]
	I0916 11:08:12.162653 2175536 request.go:632] Waited for 59.256414ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:12.162771 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:12.162778 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:12.162787 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:12.162794 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:12.165185 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:12.165207 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:12.165216 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:12.165220 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:12.165222 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:12.165225 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:12 GMT
	I0916 11:08:12.165227 2175536 round_trippers.go:580]     Audit-Id: 757781fc-198b-49dc-8b83-283ae57f1413
	I0916 11:08:12.165230 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:12.165610 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"503","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4891 chars]
	I0916 11:08:12.599287 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:12.599312 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:12.599322 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:12.599328 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:12.601604 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:12.601626 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:12.601634 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:12.601640 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:12.601643 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:12 GMT
	I0916 11:08:12.601646 2175536 round_trippers.go:580]     Audit-Id: e7b073e2-d3ba-4abf-aef2-4c6807f9795d
	I0916 11:08:12.601649 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:12.601651 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:12.601962 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"519","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6183 chars]
	I0916 11:08:12.602505 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:12.602523 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:12.602532 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:12.602538 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:12.604593 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:12.604620 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:12.604628 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:12.604633 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:12.604637 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:12 GMT
	I0916 11:08:12.604639 2175536 round_trippers.go:580]     Audit-Id: 198fd972-db21-44cc-9dac-ea54d2099356
	I0916 11:08:12.604643 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:12.604651 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:12.604965 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"503","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4891 chars]
	I0916 11:08:12.605324 2175536 pod_ready.go:93] pod "kube-proxy-59f9h" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:12.605344 2175536 pod_ready.go:82] duration metric: took 2.006315508s for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:12.605356 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:12.762778 2175536 request.go:632] Waited for 157.286248ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:08:12.762838 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:08:12.762845 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:12.762853 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:12.762863 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:12.765379 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:12.765447 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:12.765484 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:12.765506 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:12 GMT
	I0916 11:08:12.765522 2175536 round_trippers.go:580]     Audit-Id: 3ae5b81b-9737-4d7d-9c3c-4cc2ca8354c4
	I0916 11:08:12.765539 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:12.765567 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:12.765587 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:12.765750 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-fm5qr","generateName":"kube-proxy-","namespace":"kube-system","uid":"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73","resourceVersion":"412","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6175 chars]
	I0916 11:08:12.962648 2175536 request.go:632] Waited for 196.340469ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:12.962772 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:12.962786 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:12.962796 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:12.962800 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:12.965196 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:12.965262 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:12.965285 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:12.965302 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:12 GMT
	I0916 11:08:12.965317 2175536 round_trippers.go:580]     Audit-Id: e1e2d869-d8ed-4096-a5f3-2a88a7eaf374
	I0916 11:08:12.965347 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:12.965369 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:12.965378 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:12.965591 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:08:12.965993 2175536 pod_ready.go:93] pod "kube-proxy-fm5qr" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:12.966013 2175536 pod_ready.go:82] duration metric: took 360.649916ms for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:12.966028 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:13.162762 2175536 request.go:632] Waited for 196.605419ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:08:13.162831 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:08:13.162868 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:13.162880 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:13.162885 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:13.165421 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:13.165449 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:13.165458 2175536 round_trippers.go:580]     Audit-Id: 2dcf7ab0-d2e0-4cf4-894b-326dc18f9cc6
	I0916 11:08:13.165464 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:13.165469 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:13.165477 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:13.165483 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:13.165487 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:13 GMT
	I0916 11:08:13.165816 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"438","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 4975 chars]
	I0916 11:08:13.362859 2175536 request.go:632] Waited for 196.441481ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:13.362932 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:13.362941 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:13.362950 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:13.362957 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:13.365475 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:13.365510 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:13.365520 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:13.365524 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:13 GMT
	I0916 11:08:13.365528 2175536 round_trippers.go:580]     Audit-Id: 27e6524a-64b8-4349-893b-6758c536e036
	I0916 11:08:13.365532 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:13.365535 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:13.365539 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:13.365951 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:08:13.366412 2175536 pod_ready.go:93] pod "kube-scheduler-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:13.366439 2175536 pod_ready.go:82] duration metric: took 400.396713ms for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:13.366452 2175536 pod_ready.go:39] duration metric: took 2.80123186s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:08:13.366466 2175536 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:08:13.366549 2175536 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:08:13.378668 2175536 system_svc.go:56] duration metric: took 12.192528ms WaitForService to wait for kubelet
	I0916 11:08:13.378764 2175536 kubeadm.go:582] duration metric: took 3.44425357s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:08:13.378787 2175536 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:08:13.562293 2175536 request.go:632] Waited for 183.415326ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes
	I0916 11:08:13.562353 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes
	I0916 11:08:13.562359 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:13.562368 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:13.562377 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:13.565435 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:08:13.565466 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:13.565486 2175536 round_trippers.go:580]     Audit-Id: c7358d21-64a7-4fae-bed0-f5f7f8c5324c
	I0916 11:08:13.565491 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:13.565494 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:13.565535 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:13.565544 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:13.565547 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:13 GMT
	I0916 11:08:13.566135 2175536 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"520"},"items":[{"metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"manag
edFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1", [truncated 11039 chars]
	I0916 11:08:13.566845 2175536 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:08:13.566873 2175536 node_conditions.go:123] node cpu capacity is 2
	I0916 11:08:13.566892 2175536 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:08:13.566897 2175536 node_conditions.go:123] node cpu capacity is 2
	I0916 11:08:13.566902 2175536 node_conditions.go:105] duration metric: took 188.106468ms to run NodePressure ...
	I0916 11:08:13.566914 2175536 start.go:241] waiting for startup goroutines ...
	I0916 11:08:13.566951 2175536 start.go:255] writing updated cluster config ...
	I0916 11:08:13.567260 2175536 ssh_runner.go:195] Run: rm -f paused
	I0916 11:08:13.574763 2175536 out.go:177] * Done! kubectl is now configured to use "multinode-890146" cluster and "default" namespace by default
	E0916 11:08:13.576452 2175536 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	24b8d7a28fb62       89a35e2ebb6b9       35 seconds ago       Running             busybox                   0                   b4e431597f321       busybox-7dff88458-hf6zl
	e8a9035126acc       2f6c962e7b831       58 seconds ago       Running             coredns                   0                   4d75eb3d0406a       coredns-7c65d6cfc9-vp22b
	0ca1a17f49909       ba04bb24b9575       About a minute ago   Running             storage-provisioner       0                   6e9271efa5691       storage-provisioner
	eccab3e428039       6a23fa8fd2b78       About a minute ago   Running             kindnet-cni               0                   c60887e75f823       kindnet-dbrhk
	88800ca3adcda       24a140c548c07       About a minute ago   Running             kube-proxy                0                   a71ab4f91b123       kube-proxy-fm5qr
	e8e11b0a6506f       27e3830e14027       About a minute ago   Running             etcd                      0                   d29e1a2d28295       etcd-multinode-890146
	305b8895a3440       d3f53a98c0a9d       About a minute ago   Running             kube-apiserver            0                   2812a818d9d32       kube-apiserver-multinode-890146
	424e6c1030bdc       7f8aa378bb47d       About a minute ago   Running             kube-scheduler            0                   8856cda765ae4       kube-scheduler-multinode-890146
	9d6ccf43cf5a5       279f381cb3736       About a minute ago   Running             kube-controller-manager   0                   8ad0b604e598f       kube-controller-manager-multinode-890146
	
	
	==> containerd <==
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.875899574Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.875914483Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.876019738Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.936171123Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-7c65d6cfc9-vp22b,Uid:a6adb735-448b-480b-aba1-3ce4d56c6fc7,Namespace:kube-system,Attempt:0,} returns sandbox id \"4d75eb3d0406a0f1516c1dd540e0be271fd8a5b21b2779e789e502b7efe11eb9\""
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.942107322Z" level=info msg="CreateContainer within sandbox \"4d75eb3d0406a0f1516c1dd540e0be271fd8a5b21b2779e789e502b7efe11eb9\" for container &ContainerMetadata{Name:coredns,Attempt:0,}"
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.961549798Z" level=info msg="CreateContainer within sandbox \"4d75eb3d0406a0f1516c1dd540e0be271fd8a5b21b2779e789e502b7efe11eb9\" for &ContainerMetadata{Name:coredns,Attempt:0,} returns container id \"e8a9035126acce637354968a7c4092e855f0da6d2a3f39d1f94b5795e1d5079b\""
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.963144161Z" level=info msg="StartContainer for \"e8a9035126acce637354968a7c4092e855f0da6d2a3f39d1f94b5795e1d5079b\""
	Sep 16 11:07:53 multinode-890146 containerd[823]: time="2024-09-16T11:07:53.021587453Z" level=info msg="StartContainer for \"e8a9035126acce637354968a7c4092e855f0da6d2a3f39d1f94b5795e1d5079b\" returns successfully"
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.708375311Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:busybox-7dff88458-hf6zl,Uid:8e7abaaa-be47-456f-9980-53cbfcd75f48,Namespace:default,Attempt:0,}"
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.752948331Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.753039104Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.753056466Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.754070729Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.804334443Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:busybox-7dff88458-hf6zl,Uid:8e7abaaa-be47-456f-9980-53cbfcd75f48,Namespace:default,Attempt:0,} returns sandbox id \"b4e431597f32168c0494b111ad4e32bd08acefca6857024028a1a1fbe5f51839\""
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.809715248Z" level=info msg="PullImage \"gcr.io/k8s-minikube/busybox:1.28\""
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.525376043Z" level=info msg="ImageCreate event name:\"gcr.io/k8s-minikube/busybox:1.28\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.526574215Z" level=info msg="stop pulling image gcr.io/k8s-minikube/busybox:1.28: active requests=0, bytes read=766310"
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.528358198Z" level=info msg="ImageCreate event name:\"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.531279979Z" level=info msg="ImageCreate event name:\"gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.531999508Z" level=info msg="Pulled image \"gcr.io/k8s-minikube/busybox:1.28\" with image id \"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\", repo tag \"gcr.io/k8s-minikube/busybox:1.28\", repo digest \"gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12\", size \"764554\" in 1.722111371s"
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.532118227Z" level=info msg="PullImage \"gcr.io/k8s-minikube/busybox:1.28\" returns image reference \"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\""
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.536596645Z" level=info msg="CreateContainer within sandbox \"b4e431597f32168c0494b111ad4e32bd08acefca6857024028a1a1fbe5f51839\" for container &ContainerMetadata{Name:busybox,Attempt:0,}"
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.556009122Z" level=info msg="CreateContainer within sandbox \"b4e431597f32168c0494b111ad4e32bd08acefca6857024028a1a1fbe5f51839\" for &ContainerMetadata{Name:busybox,Attempt:0,} returns container id \"24b8d7a28fb62d5eed5cac16c46c9067bbd582c064a95212b74b18cbaffddfeb\""
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.557019119Z" level=info msg="StartContainer for \"24b8d7a28fb62d5eed5cac16c46c9067bbd582c064a95212b74b18cbaffddfeb\""
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.613496602Z" level=info msg="StartContainer for \"24b8d7a28fb62d5eed5cac16c46c9067bbd582c064a95212b74b18cbaffddfeb\" returns successfully"
	
	
	==> coredns [e8a9035126acce637354968a7c4092e855f0da6d2a3f39d1f94b5795e1d5079b] <==
	[INFO] 10.244.0.3:52581 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.00009111s
	[INFO] 10.244.1.2:42040 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000125882s
	[INFO] 10.244.1.2:35392 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.002402252s
	[INFO] 10.244.1.2:42007 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000093398s
	[INFO] 10.244.1.2:58148 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000099052s
	[INFO] 10.244.1.2:60433 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001076606s
	[INFO] 10.244.1.2:45965 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000079565s
	[INFO] 10.244.1.2:44644 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000090806s
	[INFO] 10.244.1.2:36880 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000072689s
	[INFO] 10.244.0.3:35082 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000098806s
	[INFO] 10.244.0.3:38242 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000109899s
	[INFO] 10.244.0.3:60732 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.00008109s
	[INFO] 10.244.0.3:44313 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000072985s
	[INFO] 10.244.1.2:60136 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000150612s
	[INFO] 10.244.1.2:59383 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000110605s
	[INFO] 10.244.1.2:38945 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000092808s
	[INFO] 10.244.1.2:35665 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000081345s
	[INFO] 10.244.0.3:36942 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000120467s
	[INFO] 10.244.0.3:55441 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000108315s
	[INFO] 10.244.0.3:38725 - 5 "PTR IN 1.58.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000129336s
	[INFO] 10.244.0.3:40340 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000172389s
	[INFO] 10.244.1.2:40345 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000111507s
	[INFO] 10.244.1.2:51062 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.0000773s
	[INFO] 10.244.1.2:40631 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000083733s
	[INFO] 10.244.1.2:39196 - 5 "PTR IN 1.58.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000074067s
	
	
	==> describe nodes <==
	Name:               multinode-890146
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-890146
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-890146
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T11_07_33_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:07:30 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-890146
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:08:44 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:08:34 +0000   Mon, 16 Sep 2024 11:07:27 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:08:34 +0000   Mon, 16 Sep 2024 11:07:27 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:08:34 +0000   Mon, 16 Sep 2024 11:07:27 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:08:34 +0000   Mon, 16 Sep 2024 11:07:30 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.58.2
	  Hostname:    multinode-890146
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 858ccd60bec74dcb8c460c7772b0d996
	  System UUID:                2cb24a37-7b71-4957-b8fd-d0da5c3f8b7a
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-hf6zl                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         38s
	  kube-system                 coredns-7c65d6cfc9-vp22b                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     75s
	  kube-system                 etcd-multinode-890146                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         79s
	  kube-system                 kindnet-dbrhk                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      75s
	  kube-system                 kube-apiserver-multinode-890146             250m (12%)    0 (0%)      0 (0%)           0 (0%)         79s
	  kube-system                 kube-controller-manager-multinode-890146    200m (10%)    0 (0%)      0 (0%)           0 (0%)         79s
	  kube-system                 kube-proxy-fm5qr                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         75s
	  kube-system                 kube-scheduler-multinode-890146             100m (5%)     0 (0%)      0 (0%)           0 (0%)         81s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         74s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   Starting                 73s                kube-proxy       
	  Normal   NodeAllocatableEnforced  87s                kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 87s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  87s (x8 over 87s)  kubelet          Node multinode-890146 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    87s (x7 over 87s)  kubelet          Node multinode-890146 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     87s (x7 over 87s)  kubelet          Node multinode-890146 status is now: NodeHasSufficientPID
	  Normal   Starting                 87s                kubelet          Starting kubelet.
	  Normal   Starting                 80s                kubelet          Starting kubelet.
	  Warning  CgroupV1                 80s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  80s                kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  79s                kubelet          Node multinode-890146 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    79s                kubelet          Node multinode-890146 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     79s                kubelet          Node multinode-890146 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           76s                node-controller  Node multinode-890146 event: Registered Node multinode-890146 in Controller
	
	
	Name:               multinode-890146-m02
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-890146-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-890146
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T11_08_09_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:08:09 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-890146-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:08:49 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:08:39 +0000   Mon, 16 Sep 2024 11:08:09 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:08:39 +0000   Mon, 16 Sep 2024 11:08:09 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:08:39 +0000   Mon, 16 Sep 2024 11:08:09 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:08:39 +0000   Mon, 16 Sep 2024 11:08:10 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.58.3
	  Hostname:    multinode-890146-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 89c2f3455f224166b20833d27ea5ec88
	  System UUID:                afe70f4d-0cb5-4f79-97b8-28a81db2fa30
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                       CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                       ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-wrnfh    0 (0%)        0 (0%)      0 (0%)           0 (0%)         38s
	  kube-system                 kindnet-4sjj6              100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      43s
	  kube-system                 kube-proxy-59f9h           0 (0%)        0 (0%)      0 (0%)           0 (0%)         43s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   Starting                 40s                kube-proxy       
	  Warning  CgroupV1                 43s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  43s (x2 over 43s)  kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    43s (x2 over 43s)  kubelet          Node multinode-890146-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     43s (x2 over 43s)  kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  43s                kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeReady                42s                kubelet          Node multinode-890146-m02 status is now: NodeReady
	  Normal   RegisteredNode           41s                node-controller  Node multinode-890146-m02 event: Registered Node multinode-890146-m02 in Controller
	
	
	Name:               multinode-890146-m03
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-890146-m03
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-890146
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T11_08_46_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:08:46 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-890146-m03
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:08:46 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:08:47 +0000   Mon, 16 Sep 2024 11:08:46 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:08:47 +0000   Mon, 16 Sep 2024 11:08:46 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:08:47 +0000   Mon, 16 Sep 2024 11:08:46 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:08:47 +0000   Mon, 16 Sep 2024 11:08:47 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.58.4
	  Hostname:    multinode-890146-m03
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 0e702486783f4a4584ef5ec252793eab
	  System UUID:                04a69dc2-fb3b-47af-837f-29ac0a7025b8
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.2.0/24
	PodCIDRs:                     10.244.2.0/24
	Non-terminated Pods:          (2 in total)
	  Namespace                   Name                CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                ------------  ----------  ---------------  -------------  ---
	  kube-system                 kindnet-ndgrk       100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      6s
	  kube-system                 kube-proxy-vl27g    0 (0%)        0 (0%)      0 (0%)           0 (0%)         6s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age              From             Message
	  ----     ------                   ----             ----             -------
	  Normal   Starting                 3s               kube-proxy       
	  Warning  CgroupV1                 7s               kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   RegisteredNode           6s               node-controller  Node multinode-890146-m03 event: Registered Node multinode-890146-m03 in Controller
	  Normal   NodeHasSufficientMemory  6s (x2 over 6s)  kubelet          Node multinode-890146-m03 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    6s (x2 over 6s)  kubelet          Node multinode-890146-m03 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     6s (x2 over 6s)  kubelet          Node multinode-890146-m03 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  6s               kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeReady                5s               kubelet          Node multinode-890146-m03 status is now: NodeReady
	
	
	==> dmesg <==
	
	
	==> etcd [e8e11b0a6506f9d34c5800c4a5a6bcc8b9f3225a3487a3c437bc87d0b0aaf53d] <==
	{"level":"info","ts":"2024-09-16T11:07:26.598384Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T11:07:26.598542Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.58.2:2380"}
	{"level":"info","ts":"2024-09-16T11:07:26.598739Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.58.2:2380"}
	{"level":"info","ts":"2024-09-16T11:07:26.599020Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"b2c6679ac05f2cf1","initial-advertise-peer-urls":["https://192.168.58.2:2380"],"listen-peer-urls":["https://192.168.58.2:2380"],"advertise-client-urls":["https://192.168.58.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.58.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T11:07:26.599063Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T11:07:26.816839Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T11:07:26.816896Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T11:07:26.816925Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 received MsgPreVoteResp from b2c6679ac05f2cf1 at term 1"}
	{"level":"info","ts":"2024-09-16T11:07:26.816938Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T11:07:26.816946Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 received MsgVoteResp from b2c6679ac05f2cf1 at term 2"}
	{"level":"info","ts":"2024-09-16T11:07:26.816957Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became leader at term 2"}
	{"level":"info","ts":"2024-09-16T11:07:26.816966Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: b2c6679ac05f2cf1 elected leader b2c6679ac05f2cf1 at term 2"}
	{"level":"info","ts":"2024-09-16T11:07:26.818827Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:07:26.822083Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"b2c6679ac05f2cf1","local-member-attributes":"{Name:multinode-890146 ClientURLs:[https://192.168.58.2:2379]}","request-path":"/0/members/b2c6679ac05f2cf1/attributes","cluster-id":"3a56e4ca95e2355c","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:07:26.822228Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:07:26.823261Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:07:26.824339Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T11:07:26.830909Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:07:26.831002Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"3a56e4ca95e2355c","local-member-id":"b2c6679ac05f2cf1","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:07:26.831112Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:07:26.831144Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:07:26.831564Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:07:26.831585Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T11:07:26.840842Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:07:26.842343Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.58.2:2379"}
	
	
	==> kernel <==
	 11:08:52 up 1 day, 14:51,  0 users,  load average: 1.62, 2.23, 2.17
	Linux multinode-890146 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [eccab3e428039af99fd1b2378ae8fd52f2837469955a8b78fd8b72f906813586] <==
	I0916 11:07:49.340587       1 main.go:299] handling current node
	I0916 11:07:59.340213       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:07:59.340251       1 main.go:299] handling current node
	I0916 11:08:09.341921       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:08:09.341978       1 main.go:299] handling current node
	I0916 11:08:19.335753       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:08:19.335793       1 main.go:299] handling current node
	I0916 11:08:19.336025       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:08:19.336115       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:08:19.336345       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.1.0/24 Src: <nil> Gw: 192.168.58.3 Flags: [] Table: 0} 
	I0916 11:08:29.336643       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:08:29.336901       1 main.go:299] handling current node
	I0916 11:08:29.337042       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:08:29.337134       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:08:39.346867       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:08:39.347023       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:08:39.347190       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:08:39.347206       1 main.go:299] handling current node
	I0916 11:08:49.340449       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:08:49.340703       1 main.go:299] handling current node
	I0916 11:08:49.340775       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:08:49.340845       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:08:49.341113       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:08:49.341127       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	I0916 11:08:49.341182       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.2.0/24 Src: <nil> Gw: 192.168.58.4 Flags: [] Table: 0} 
	
	
	==> kube-apiserver [305b8895a34401a4626618470969b6ca3b591de13b0d36af0b2b2b23096ac46b] <==
	E0916 11:07:30.119633       1 controller.go:148] "Unhandled Error" err="while syncing ConfigMap \"kube-system/kube-apiserver-legacy-service-account-token-tracking\", err: namespaces \"kube-system\" not found" logger="UnhandledError"
	I0916 11:07:30.130522       1 controller.go:615] quota admission added evaluator for: namespaces
	I0916 11:07:30.311421       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:07:30.828773       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0916 11:07:30.837047       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0916 11:07:30.837071       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:07:31.543409       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 11:07:31.599890       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 11:07:31.699349       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 11:07:31.710041       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.58.2]
	I0916 11:07:31.711701       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:07:31.718520       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 11:07:31.982310       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 11:07:32.906044       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 11:07:32.918442       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 11:07:32.931656       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 11:07:36.841575       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0916 11:07:37.332525       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	E0916 11:08:30.067502       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:36938: use of closed network connection
	E0916 11:08:30.295579       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:36950: use of closed network connection
	E0916 11:08:30.506756       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:36960: use of closed network connection
	E0916 11:08:30.713694       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:36984: use of closed network connection
	E0916 11:08:31.123853       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:37034: use of closed network connection
	E0916 11:08:31.466128       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:37056: use of closed network connection
	E0916 11:08:32.100602       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:37082: use of closed network connection
	
	
	==> kube-controller-manager [9d6ccf43cf5a5c28d56e616702330e693dc76d6773c7cc3e02e94f189195689b] <==
	I0916 11:08:11.982982       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="multinode-890146-m02"
	I0916 11:08:14.387728       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="50.768606ms"
	I0916 11:08:14.452119       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="64.332523ms"
	I0916 11:08:14.452218       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="51.766µs"
	I0916 11:08:14.468869       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="65.157µs"
	I0916 11:08:16.989815       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="7.491662ms"
	I0916 11:08:16.989901       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="44.176µs"
	I0916 11:08:29.309361       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="12.824827ms"
	I0916 11:08:29.310058       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="648.874µs"
	I0916 11:08:34.122844       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146"
	I0916 11:08:39.809520       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:08:46.203029       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:08:46.203855       1 actual_state_of_world.go:540] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"multinode-890146-m03\" does not exist"
	I0916 11:08:46.218321       1 range_allocator.go:422] "Set node PodCIDR" logger="node-ipam-controller" node="multinode-890146-m03" podCIDRs=["10.244.2.0/24"]
	I0916 11:08:46.218361       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.219096       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.235698       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.302636       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.605228       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.989720       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="multinode-890146-m03"
	I0916 11:08:47.080416       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:47.243737       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:08:47.243773       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:47.254901       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:52.008891       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	
	
	==> kube-proxy [88800ca3adcdad421bba0ffcef548a966eeb5c210e5453a2ba8470a9e90ea01e] <==
	I0916 11:07:38.741936       1 server_linux.go:66] "Using iptables proxy"
	I0916 11:07:38.860580       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.58.2"]
	E0916 11:07:38.860641       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 11:07:38.962890       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 11:07:38.962952       1 server_linux.go:169] "Using iptables Proxier"
	I0916 11:07:38.967180       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 11:07:38.968664       1 server.go:483] "Version info" version="v1.31.1"
	I0916 11:07:38.968689       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:07:38.978327       1 config.go:199] "Starting service config controller"
	I0916 11:07:38.978605       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 11:07:38.979008       1 config.go:105] "Starting endpoint slice config controller"
	I0916 11:07:38.979892       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 11:07:38.980656       1 config.go:328] "Starting node config controller"
	I0916 11:07:38.980803       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 11:07:39.083387       1 shared_informer.go:320] Caches are synced for node config
	I0916 11:07:39.083600       1 shared_informer.go:320] Caches are synced for service config
	I0916 11:07:39.083628       1 shared_informer.go:320] Caches are synced for endpoint slice config
	
	
	==> kube-scheduler [424e6c1030bdc58751fd76a7652c31e5bd7dff844d888049b87815ddfaecc90b] <==
	I0916 11:07:29.575629       1 serving.go:386] Generated self-signed cert in-memory
	I0916 11:07:32.225115       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 11:07:32.225654       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:07:32.234302       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 11:07:32.234494       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 11:07:32.234575       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 11:07:32.234645       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 11:07:32.244028       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 11:07:32.245351       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 11:07:32.244860       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 11:07:32.254326       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 11:07:32.335551       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 11:07:32.355020       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 11:07:32.355033       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.256142    1515 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"0a9ea55e72ca505abc76bafa32448414335d874337283bd8fae2bd8a3d0c52da\": failed to find network info for sandbox \"0a9ea55e72ca505abc76bafa32448414335d874337283bd8fae2bd8a3d0c52da\"" pod="kube-system/coredns-7c65d6cfc9-vp22b"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.256194    1515 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"0a9ea55e72ca505abc76bafa32448414335d874337283bd8fae2bd8a3d0c52da\": failed to find network info for sandbox \"0a9ea55e72ca505abc76bafa32448414335d874337283bd8fae2bd8a3d0c52da\"" pod="kube-system/coredns-7c65d6cfc9-vp22b"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.256263    1515 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-7c65d6cfc9-vp22b_kube-system(a6adb735-448b-480b-aba1-3ce4d56c6fc7)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-7c65d6cfc9-vp22b_kube-system(a6adb735-448b-480b-aba1-3ce4d56c6fc7)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"0a9ea55e72ca505abc76bafa32448414335d874337283bd8fae2bd8a3d0c52da\\\": failed to find network info for sandbox \\\"0a9ea55e72ca505abc76bafa32448414335d874337283bd8fae2bd8a3d0c52da\\\"\"" pod="kube-system/coredns-7c65d6cfc9-vp22b" podUID="a6adb735-448b-480b-aba1-3ce4d56c6fc7"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.261111    1515 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\": failed to find network info for sandbox \"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\""
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.261194    1515 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\": failed to find network info for sandbox \"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\"" pod="kube-system/coredns-7c65d6cfc9-bb4db"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.261217    1515 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\": failed to find network info for sandbox \"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\"" pod="kube-system/coredns-7c65d6cfc9-bb4db"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.261295    1515 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-7c65d6cfc9-bb4db_kube-system(3fd53b00-28ef-44ef-8541-097ebc870b2f)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-7c65d6cfc9-bb4db_kube-system(3fd53b00-28ef-44ef-8541-097ebc870b2f)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\\\": failed to find network info for sandbox \\\"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\\\"\"" pod="kube-system/coredns-7c65d6cfc9-bb4db" podUID="3fd53b00-28ef-44ef-8541-097ebc870b2f"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.906282    1515 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kindnet-dbrhk" podStartSLOduration=1.906263313 podStartE2EDuration="1.906263313s" podCreationTimestamp="2024-09-16 11:07:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:07:38.903272444 +0000 UTC m=+6.207702668" watchObservedRunningTime="2024-09-16 11:07:38.906263313 +0000 UTC m=+6.210693537"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.960520    1515 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-proxy-fm5qr" podStartSLOduration=1.960501425 podStartE2EDuration="1.960501425s" podCreationTimestamp="2024-09-16 11:07:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:07:38.923652778 +0000 UTC m=+6.228082994" watchObservedRunningTime="2024-09-16 11:07:38.960501425 +0000 UTC m=+6.264931641"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.969625    1515 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjcfc\" (UniqueName: \"kubernetes.io/projected/3fd53b00-28ef-44ef-8541-097ebc870b2f-kube-api-access-wjcfc\") pod \"3fd53b00-28ef-44ef-8541-097ebc870b2f\" (UID: \"3fd53b00-28ef-44ef-8541-097ebc870b2f\") "
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.969683    1515 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3fd53b00-28ef-44ef-8541-097ebc870b2f-config-volume\") pod \"3fd53b00-28ef-44ef-8541-097ebc870b2f\" (UID: \"3fd53b00-28ef-44ef-8541-097ebc870b2f\") "
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.969855    1515 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/97795413-5c7a-480b-9cbd-18d4dea5669b-tmp\") pod \"storage-provisioner\" (UID: \"97795413-5c7a-480b-9cbd-18d4dea5669b\") " pod="kube-system/storage-provisioner"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.969899    1515 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kv5x\" (UniqueName: \"kubernetes.io/projected/97795413-5c7a-480b-9cbd-18d4dea5669b-kube-api-access-5kv5x\") pod \"storage-provisioner\" (UID: \"97795413-5c7a-480b-9cbd-18d4dea5669b\") " pod="kube-system/storage-provisioner"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.970645    1515 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3fd53b00-28ef-44ef-8541-097ebc870b2f-config-volume" (OuterVolumeSpecName: "config-volume") pod "3fd53b00-28ef-44ef-8541-097ebc870b2f" (UID: "3fd53b00-28ef-44ef-8541-097ebc870b2f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.975577    1515 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fd53b00-28ef-44ef-8541-097ebc870b2f-kube-api-access-wjcfc" (OuterVolumeSpecName: "kube-api-access-wjcfc") pod "3fd53b00-28ef-44ef-8541-097ebc870b2f" (UID: "3fd53b00-28ef-44ef-8541-097ebc870b2f"). InnerVolumeSpecName "kube-api-access-wjcfc". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 11:07:39 multinode-890146 kubelet[1515]: I0916 11:07:39.071215    1515 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-wjcfc\" (UniqueName: \"kubernetes.io/projected/3fd53b00-28ef-44ef-8541-097ebc870b2f-kube-api-access-wjcfc\") on node \"multinode-890146\" DevicePath \"\""
	Sep 16 11:07:39 multinode-890146 kubelet[1515]: I0916 11:07:39.071430    1515 reconciler_common.go:288] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3fd53b00-28ef-44ef-8541-097ebc870b2f-config-volume\") on node \"multinode-890146\" DevicePath \"\""
	Sep 16 11:07:40 multinode-890146 kubelet[1515]: I0916 11:07:40.627938    1515 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/storage-provisioner" podStartSLOduration=2.627915516 podStartE2EDuration="2.627915516s" podCreationTimestamp="2024-09-16 11:07:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:07:39.937944276 +0000 UTC m=+7.242374500" watchObservedRunningTime="2024-09-16 11:07:40.627915516 +0000 UTC m=+7.932345740"
	Sep 16 11:07:40 multinode-890146 kubelet[1515]: I0916 11:07:40.834319    1515 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fd53b00-28ef-44ef-8541-097ebc870b2f" path="/var/lib/kubelet/pods/3fd53b00-28ef-44ef-8541-097ebc870b2f/volumes"
	Sep 16 11:07:43 multinode-890146 kubelet[1515]: I0916 11:07:43.244514    1515 kuberuntime_manager.go:1635] "Updating runtime config through cri with podcidr" CIDR="10.244.0.0/24"
	Sep 16 11:07:43 multinode-890146 kubelet[1515]: I0916 11:07:43.245380    1515 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Sep 16 11:07:53 multinode-890146 kubelet[1515]: I0916 11:07:53.951905    1515 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/coredns-7c65d6cfc9-vp22b" podStartSLOduration=16.951884165 podStartE2EDuration="16.951884165s" podCreationTimestamp="2024-09-16 11:07:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:07:53.937091831 +0000 UTC m=+21.241522064" watchObservedRunningTime="2024-09-16 11:07:53.951884165 +0000 UTC m=+21.256314381"
	Sep 16 11:08:14 multinode-890146 kubelet[1515]: I0916 11:08:14.526967    1515 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4tvl\" (UniqueName: \"kubernetes.io/projected/8e7abaaa-be47-456f-9980-53cbfcd75f48-kube-api-access-r4tvl\") pod \"busybox-7dff88458-hf6zl\" (UID: \"8e7abaaa-be47-456f-9980-53cbfcd75f48\") " pod="default/busybox-7dff88458-hf6zl"
	Sep 16 11:08:16 multinode-890146 kubelet[1515]: I0916 11:08:16.984853    1515 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="default/busybox-7dff88458-hf6zl" podStartSLOduration=1.257437152 podStartE2EDuration="2.984813644s" podCreationTimestamp="2024-09-16 11:08:14 +0000 UTC" firstStartedPulling="2024-09-16 11:08:14.80585921 +0000 UTC m=+42.110289426" lastFinishedPulling="2024-09-16 11:08:16.533235702 +0000 UTC m=+43.837665918" observedRunningTime="2024-09-16 11:08:16.984406327 +0000 UTC m=+44.288836543" watchObservedRunningTime="2024-09-16 11:08:16.984813644 +0000 UTC m=+44.289243860"
	Sep 16 11:08:30 multinode-890146 kubelet[1515]: E0916 11:08:30.916723    1515 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 192.168.58.2:48600->192.168.58.2:10010: read tcp 192.168.58.2:48600->192.168.58.2:10010: read: connection reset by peer
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p multinode-890146 -n multinode-890146
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-890146 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context multinode-890146 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (354.911µs)
helpers_test.go:263: kubectl --context multinode-890146 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiNode/serial/MultiNodeLabels (2.63s)

                                                
                                    
x
+
TestMultiNode/serial/StartAfterStop (12.03s)

                                                
                                                
=== RUN   TestMultiNode/serial/StartAfterStop
multinode_test.go:282: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 node start m03 -v=7 --alsologtostderr
E0916 11:09:07.673040 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
multinode_test.go:282: (dbg) Done: out/minikube-linux-arm64 -p multinode-890146 node start m03 -v=7 --alsologtostderr: (8.813983244s)
multinode_test.go:290: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 status -v=7 --alsologtostderr
multinode_test.go:306: (dbg) Run:  kubectl get nodes
multinode_test.go:306: (dbg) Non-zero exit: kubectl get nodes: fork/exec /usr/local/bin/kubectl: exec format error (484.478µs)
multinode_test.go:308: failed to kubectl get nodes. args "kubectl get nodes" : fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiNode/serial/StartAfterStop]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect multinode-890146
helpers_test.go:235: (dbg) docker inspect multinode-890146:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb",
	        "Created": "2024-09-16T11:07:09.881207881Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2176022,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:07:10.021938387Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/hostname",
	        "HostsPath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/hosts",
	        "LogPath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb-json.log",
	        "Name": "/multinode-890146",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "multinode-890146:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "multinode-890146",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7/merged",
	                "UpperDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7/diff",
	                "WorkDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "multinode-890146",
	                "Source": "/var/lib/docker/volumes/multinode-890146/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "multinode-890146",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "multinode-890146",
	                "name.minikube.sigs.k8s.io": "multinode-890146",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "435c30f14c38000575965d33c99ca27815d0d91f5250deffde5cddcb8e65dca9",
	            "SandboxKey": "/var/run/docker/netns/435c30f14c38",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40717"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40718"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40721"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40719"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40720"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "multinode-890146": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.58.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:3a:02",
	                    "DriverOpts": null,
	                    "NetworkID": "b138f637362d33b7ccebcd9c06d6cdaa35c434cdf582fc761f98e8246e8681cc",
	                    "EndpointID": "1ef64936ec7ebb2090965d084803451acee533d9e380198f704779360ea5dcdb",
	                    "Gateway": "192.168.58.1",
	                    "IPAddress": "192.168.58.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "multinode-890146",
	                        "d045dde36e30"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p multinode-890146 -n multinode-890146
helpers_test.go:244: <<< TestMultiNode/serial/StartAfterStop FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/StartAfterStop]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p multinode-890146 logs -n 25: (1.425428904s)
helpers_test.go:252: TestMultiNode/serial/StartAfterStop logs: 
-- stdout --
	
	==> Audit <==
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	| Command |                                          Args                                           |     Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	| cp      | multinode-890146 cp multinode-890146:/home/docker/cp-test.txt                           | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | multinode-890146-m03:/home/docker/cp-test_multinode-890146_multinode-890146-m03.txt     |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | multinode-890146 sudo cat                                                               |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n multinode-890146-m03 sudo cat                                   | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | /home/docker/cp-test_multinode-890146_multinode-890146-m03.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp testdata/cp-test.txt                                                | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | multinode-890146-m02:/home/docker/cp-test.txt                                           |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | multinode-890146-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m02:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | /tmp/TestMultiNodeserialCopyFile3892048771/001/cp-test_multinode-890146-m02.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | multinode-890146-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m02:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | multinode-890146:/home/docker/cp-test_multinode-890146-m02_multinode-890146.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | multinode-890146-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n multinode-890146 sudo cat                                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | /home/docker/cp-test_multinode-890146-m02_multinode-890146.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m02:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03:/home/docker/cp-test_multinode-890146-m02_multinode-890146-m03.txt |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n multinode-890146-m03 sudo cat                                   | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /home/docker/cp-test_multinode-890146-m02_multinode-890146-m03.txt                      |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp testdata/cp-test.txt                                                | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03:/home/docker/cp-test.txt                                           |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m03:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /tmp/TestMultiNodeserialCopyFile3892048771/001/cp-test_multinode-890146-m03.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m03:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146:/home/docker/cp-test_multinode-890146-m03_multinode-890146.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n multinode-890146 sudo cat                                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /home/docker/cp-test_multinode-890146-m03_multinode-890146.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m03:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m02:/home/docker/cp-test_multinode-890146-m03_multinode-890146-m02.txt |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n multinode-890146-m02 sudo cat                                   | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /home/docker/cp-test_multinode-890146-m03_multinode-890146-m02.txt                      |                  |         |         |                     |                     |
	| node    | multinode-890146 node stop m03                                                          | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	| node    | multinode-890146 node start                                                             | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | m03 -v=7 --alsologtostderr                                                              |                  |         |         |                     |                     |
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 11:07:04
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 11:07:04.710961 2175536 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:07:04.711406 2175536 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:07:04.711441 2175536 out.go:358] Setting ErrFile to fd 2...
	I0916 11:07:04.711461 2175536 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:07:04.711736 2175536 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 11:07:04.712191 2175536 out.go:352] Setting JSON to false
	I0916 11:07:04.713227 2175536 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":139767,"bootTime":1726345058,"procs":190,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 11:07:04.713322 2175536 start.go:139] virtualization:  
	I0916 11:07:04.716025 2175536 out.go:177] * [multinode-890146] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:07:04.718481 2175536 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:07:04.718608 2175536 notify.go:220] Checking for updates...
	I0916 11:07:04.722309 2175536 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:07:04.724604 2175536 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:07:04.726247 2175536 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 11:07:04.728060 2175536 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:07:04.730011 2175536 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 11:07:04.732069 2175536 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:07:04.754782 2175536 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:07:04.754909 2175536 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:07:04.815407 2175536 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:41 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 11:07:04.805742665 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:07:04.815518 2175536 docker.go:318] overlay module found
	I0916 11:07:04.817918 2175536 out.go:177] * Using the docker driver based on user configuration
	I0916 11:07:04.819852 2175536 start.go:297] selected driver: docker
	I0916 11:07:04.819879 2175536 start.go:901] validating driver "docker" against <nil>
	I0916 11:07:04.819896 2175536 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:07:04.820612 2175536 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:07:04.882378 2175536 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:41 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 11:07:04.872960095 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:07:04.882594 2175536 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 11:07:04.882857 2175536 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:07:04.884899 2175536 out.go:177] * Using Docker driver with root privileges
	I0916 11:07:04.886500 2175536 cni.go:84] Creating CNI manager for ""
	I0916 11:07:04.886562 2175536 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0916 11:07:04.886576 2175536 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 11:07:04.886657 2175536 start.go:340] cluster config:
	{Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP:
SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:07:04.890071 2175536 out.go:177] * Starting "multinode-890146" primary control-plane node in "multinode-890146" cluster
	I0916 11:07:04.891719 2175536 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 11:07:04.893462 2175536 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:07:04.895451 2175536 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:07:04.895512 2175536 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 11:07:04.895524 2175536 cache.go:56] Caching tarball of preloaded images
	I0916 11:07:04.895532 2175536 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:07:04.895606 2175536 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 11:07:04.895616 2175536 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 11:07:04.895985 2175536 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:07:04.896016 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json: {Name:mk1dc06c1476fc6d0ac1387b52d1026d9e3527d7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	W0916 11:07:04.915293 2175536 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:07:04.915318 2175536 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:07:04.915390 2175536 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:07:04.915415 2175536 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:07:04.915427 2175536 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:07:04.915435 2175536 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:07:04.915440 2175536 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:07:04.916723 2175536 image.go:273] response: 
	I0916 11:07:05.038443 2175536 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:07:05.038485 2175536 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:07:05.038517 2175536 start.go:360] acquireMachinesLock for multinode-890146: {Name:mk50282545d8a591b3d758c5d48e2059a356819d Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:07:05.038643 2175536 start.go:364] duration metric: took 103.442µs to acquireMachinesLock for "multinode-890146"
	I0916 11:07:05.038698 2175536 start.go:93] Provisioning new machine with config: &{Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetri
cs:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 11:07:05.038886 2175536 start.go:125] createHost starting for "" (driver="docker")
	I0916 11:07:05.041773 2175536 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 11:07:05.042034 2175536 start.go:159] libmachine.API.Create for "multinode-890146" (driver="docker")
	I0916 11:07:05.042067 2175536 client.go:168] LocalClient.Create starting
	I0916 11:07:05.042152 2175536 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 11:07:05.042187 2175536 main.go:141] libmachine: Decoding PEM data...
	I0916 11:07:05.042204 2175536 main.go:141] libmachine: Parsing certificate...
	I0916 11:07:05.042263 2175536 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 11:07:05.042288 2175536 main.go:141] libmachine: Decoding PEM data...
	I0916 11:07:05.042303 2175536 main.go:141] libmachine: Parsing certificate...
	I0916 11:07:05.042733 2175536 cli_runner.go:164] Run: docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 11:07:05.058191 2175536 cli_runner.go:211] docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 11:07:05.058277 2175536 network_create.go:284] running [docker network inspect multinode-890146] to gather additional debugging logs...
	I0916 11:07:05.058297 2175536 cli_runner.go:164] Run: docker network inspect multinode-890146
	W0916 11:07:05.074895 2175536 cli_runner.go:211] docker network inspect multinode-890146 returned with exit code 1
	I0916 11:07:05.074951 2175536 network_create.go:287] error running [docker network inspect multinode-890146]: docker network inspect multinode-890146: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network multinode-890146 not found
	I0916 11:07:05.074966 2175536 network_create.go:289] output of [docker network inspect multinode-890146]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network multinode-890146 not found
	
	** /stderr **
	I0916 11:07:05.075080 2175536 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:07:05.091698 2175536 network.go:211] skipping subnet 192.168.49.0/24 that is taken: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName:br-941929ec13d1 IfaceIPv4:192.168.49.1 IfaceMTU:1500 IfaceMAC:02:42:32:84:fe:19} reservation:<nil>}
	I0916 11:07:05.092436 2175536 network.go:206] using free private subnet 192.168.58.0/24: &{IP:192.168.58.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.58.0/24 Gateway:192.168.58.1 ClientMin:192.168.58.2 ClientMax:192.168.58.254 Broadcast:192.168.58.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x4001874180}
	I0916 11:07:05.092465 2175536 network_create.go:124] attempt to create docker network multinode-890146 192.168.58.0/24 with gateway 192.168.58.1 and MTU of 1500 ...
	I0916 11:07:05.092523 2175536 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.58.0/24 --gateway=192.168.58.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=multinode-890146 multinode-890146
	I0916 11:07:05.169697 2175536 network_create.go:108] docker network multinode-890146 192.168.58.0/24 created
	I0916 11:07:05.169733 2175536 kic.go:121] calculated static IP "192.168.58.2" for the "multinode-890146" container
	I0916 11:07:05.169807 2175536 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:07:05.187546 2175536 cli_runner.go:164] Run: docker volume create multinode-890146 --label name.minikube.sigs.k8s.io=multinode-890146 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:07:05.204604 2175536 oci.go:103] Successfully created a docker volume multinode-890146
	I0916 11:07:05.204713 2175536 cli_runner.go:164] Run: docker run --rm --name multinode-890146-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-890146 --entrypoint /usr/bin/test -v multinode-890146:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 11:07:05.765543 2175536 oci.go:107] Successfully prepared a docker volume multinode-890146
	I0916 11:07:05.765593 2175536 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:07:05.765614 2175536 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 11:07:05.765685 2175536 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v multinode-890146:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 11:07:09.815871 2175536 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v multinode-890146:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.050143051s)
	I0916 11:07:09.815905 2175536 kic.go:203] duration metric: took 4.050287182s to extract preloaded images to volume ...
	W0916 11:07:09.816058 2175536 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 11:07:09.816175 2175536 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 11:07:09.867064 2175536 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname multinode-890146 --name multinode-890146 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-890146 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=multinode-890146 --network multinode-890146 --ip 192.168.58.2 --volume multinode-890146:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 11:07:10.200352 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Running}}
	I0916 11:07:10.227902 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:07:10.250171 2175536 cli_runner.go:164] Run: docker exec multinode-890146 stat /var/lib/dpkg/alternatives/iptables
	I0916 11:07:10.322219 2175536 oci.go:144] the created container "multinode-890146" has a running status.
	I0916 11:07:10.322248 2175536 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa...
	I0916 11:07:11.953902 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 11:07:11.953955 2175536 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 11:07:11.973438 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:07:11.989923 2175536 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 11:07:11.989951 2175536 kic_runner.go:114] Args: [docker exec --privileged multinode-890146 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 11:07:12.043688 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:07:12.063046 2175536 machine.go:93] provisionDockerMachine start ...
	I0916 11:07:12.063151 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:12.081954 2175536 main.go:141] libmachine: Using SSH client type: native
	I0916 11:07:12.082271 2175536 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40717 <nil> <nil>}
	I0916 11:07:12.082287 2175536 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:07:12.222205 2175536 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146
	
	I0916 11:07:12.222232 2175536 ubuntu.go:169] provisioning hostname "multinode-890146"
	I0916 11:07:12.222304 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:12.238921 2175536 main.go:141] libmachine: Using SSH client type: native
	I0916 11:07:12.239170 2175536 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40717 <nil> <nil>}
	I0916 11:07:12.239187 2175536 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-890146 && echo "multinode-890146" | sudo tee /etc/hostname
	I0916 11:07:12.386518 2175536 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146
	
	I0916 11:07:12.386619 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:12.404224 2175536 main.go:141] libmachine: Using SSH client type: native
	I0916 11:07:12.404534 2175536 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40717 <nil> <nil>}
	I0916 11:07:12.404566 2175536 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-890146' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-890146/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-890146' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:07:12.551490 2175536 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:07:12.551519 2175536 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 11:07:12.551548 2175536 ubuntu.go:177] setting up certificates
	I0916 11:07:12.551557 2175536 provision.go:84] configureAuth start
	I0916 11:07:12.551617 2175536 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146
	I0916 11:07:12.568710 2175536 provision.go:143] copyHostCerts
	I0916 11:07:12.568765 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:07:12.568800 2175536 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 11:07:12.568808 2175536 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:07:12.568889 2175536 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 11:07:12.568965 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:07:12.568981 2175536 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 11:07:12.568985 2175536 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:07:12.569022 2175536 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 11:07:12.569060 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:07:12.569077 2175536 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 11:07:12.569081 2175536 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:07:12.569108 2175536 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 11:07:12.569152 2175536 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.multinode-890146 san=[127.0.0.1 192.168.58.2 localhost minikube multinode-890146]
	I0916 11:07:13.054906 2175536 provision.go:177] copyRemoteCerts
	I0916 11:07:13.054975 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:07:13.055018 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:13.074045 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:13.171530 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:07:13.171589 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 11:07:13.195905 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:07:13.195969 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1216 bytes)
	I0916 11:07:13.219875 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:07:13.219955 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 11:07:13.244593 2175536 provision.go:87] duration metric: took 693.022182ms to configureAuth
	I0916 11:07:13.244627 2175536 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:07:13.244830 2175536 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:07:13.244844 2175536 machine.go:96] duration metric: took 1.181777353s to provisionDockerMachine
	I0916 11:07:13.244852 2175536 client.go:171] duration metric: took 8.202778601s to LocalClient.Create
	I0916 11:07:13.244865 2175536 start.go:167] duration metric: took 8.20283387s to libmachine.API.Create "multinode-890146"
	I0916 11:07:13.244877 2175536 start.go:293] postStartSetup for "multinode-890146" (driver="docker")
	I0916 11:07:13.244887 2175536 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:07:13.244944 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:07:13.244989 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:13.262952 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:13.359972 2175536 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:07:13.363266 2175536 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:07:13.363334 2175536 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:07:13.363347 2175536 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:07:13.363353 2175536 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:07:13.363359 2175536 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:07:13.363363 2175536 command_runner.go:130] > ID=ubuntu
	I0916 11:07:13.363367 2175536 command_runner.go:130] > ID_LIKE=debian
	I0916 11:07:13.363371 2175536 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:07:13.363376 2175536 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:07:13.363384 2175536 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:07:13.363394 2175536 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:07:13.363405 2175536 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:07:13.363458 2175536 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:07:13.363499 2175536 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:07:13.363514 2175536 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:07:13.363522 2175536 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:07:13.363536 2175536 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 11:07:13.363599 2175536 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 11:07:13.363680 2175536 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 11:07:13.363692 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 11:07:13.363806 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:07:13.372570 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:07:13.397437 2175536 start.go:296] duration metric: took 152.543836ms for postStartSetup
	I0916 11:07:13.397802 2175536 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146
	I0916 11:07:13.414034 2175536 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:07:13.414328 2175536 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:07:13.414377 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:13.431292 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:13.523771 2175536 command_runner.go:130] > 21%
	I0916 11:07:13.523863 2175536 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:07:13.528222 2175536 command_runner.go:130] > 154G
	I0916 11:07:13.528717 2175536 start.go:128] duration metric: took 8.489816181s to createHost
	I0916 11:07:13.528744 2175536 start.go:83] releasing machines lock for "multinode-890146", held for 8.490079023s
	I0916 11:07:13.528823 2175536 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146
	I0916 11:07:13.546503 2175536 ssh_runner.go:195] Run: cat /version.json
	I0916 11:07:13.546565 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:13.546570 2175536 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:07:13.546651 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:13.566271 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:13.580236 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:13.789090 2175536 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:07:13.789167 2175536 command_runner.go:130] > {"iso_version": "v1.34.0-1726281733-19643", "kicbase_version": "v0.0.45-1726358845-19644", "minikube_version": "v1.34.0", "commit": "f890713149c79cf50e25c13e6a5c0470aa0f0450"}
	I0916 11:07:13.789364 2175536 ssh_runner.go:195] Run: systemctl --version
	I0916 11:07:13.793341 2175536 command_runner.go:130] > systemd 249 (249.11-0ubuntu3.12)
	I0916 11:07:13.793376 2175536 command_runner.go:130] > +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified
	I0916 11:07:13.793760 2175536 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:07:13.797426 2175536 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 11:07:13.797449 2175536 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:07:13.797464 2175536 command_runner.go:130] > Device: 3ch/60d	Inode: 1301117     Links: 1
	I0916 11:07:13.797471 2175536 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:07:13.797478 2175536 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:07:13.797483 2175536 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:07:13.797488 2175536 command_runner.go:130] > Change: 2024-09-16 10:29:56.970000846 +0000
	I0916 11:07:13.797494 2175536 command_runner.go:130] >  Birth: 2024-09-16 10:29:56.970000846 +0000
	I0916 11:07:13.797773 2175536 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 11:07:13.822367 2175536 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:07:13.822480 2175536 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:07:13.850861 2175536 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf, 
	I0916 11:07:13.850919 2175536 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 11:07:13.850928 2175536 start.go:495] detecting cgroup driver to use...
	I0916 11:07:13.850973 2175536 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:07:13.851029 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 11:07:13.864220 2175536 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 11:07:13.875991 2175536 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:07:13.876079 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:07:13.890034 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:07:13.905601 2175536 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:07:13.997977 2175536 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:07:14.107738 2175536 command_runner.go:130] ! Created symlink /etc/systemd/system/cri-docker.service → /dev/null.
	I0916 11:07:14.107771 2175536 docker.go:233] disabling docker service ...
	I0916 11:07:14.107824 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:07:14.130375 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:07:14.142958 2175536 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:07:14.233252 2175536 command_runner.go:130] ! Removed /etc/systemd/system/sockets.target.wants/docker.socket.
	I0916 11:07:14.233395 2175536 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:07:14.338360 2175536 command_runner.go:130] ! Created symlink /etc/systemd/system/docker.service → /dev/null.
	I0916 11:07:14.338546 2175536 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:07:14.351624 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:07:14.367588 2175536 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0916 11:07:14.368947 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 11:07:14.379802 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 11:07:14.389657 2175536 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 11:07:14.389778 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 11:07:14.399737 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:07:14.409839 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 11:07:14.419858 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:07:14.429710 2175536 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:07:14.439343 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 11:07:14.449034 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 11:07:14.458507 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 11:07:14.468646 2175536 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:07:14.476639 2175536 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:07:14.477555 2175536 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:07:14.486240 2175536 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:07:14.573778 2175536 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 11:07:14.706938 2175536 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 11:07:14.707066 2175536 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 11:07:14.710670 2175536 command_runner.go:130] >   File: /run/containerd/containerd.sock
	I0916 11:07:14.710780 2175536 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:07:14.710802 2175536 command_runner.go:130] > Device: 45h/69d	Inode: 175         Links: 1
	I0916 11:07:14.710839 2175536 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:07:14.710863 2175536 command_runner.go:130] > Access: 2024-09-16 11:07:14.656361543 +0000
	I0916 11:07:14.710882 2175536 command_runner.go:130] > Modify: 2024-09-16 11:07:14.656361543 +0000
	I0916 11:07:14.710905 2175536 command_runner.go:130] > Change: 2024-09-16 11:07:14.656361543 +0000
	I0916 11:07:14.710931 2175536 command_runner.go:130] >  Birth: -
	I0916 11:07:14.710991 2175536 start.go:563] Will wait 60s for crictl version
	I0916 11:07:14.711072 2175536 ssh_runner.go:195] Run: which crictl
	I0916 11:07:14.714603 2175536 command_runner.go:130] > /usr/bin/crictl
	I0916 11:07:14.714721 2175536 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:07:14.752930 2175536 command_runner.go:130] > Version:  0.1.0
	I0916 11:07:14.752950 2175536 command_runner.go:130] > RuntimeName:  containerd
	I0916 11:07:14.752972 2175536 command_runner.go:130] > RuntimeVersion:  1.7.22
	I0916 11:07:14.752977 2175536 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:07:14.755746 2175536 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 11:07:14.755815 2175536 ssh_runner.go:195] Run: containerd --version
	I0916 11:07:14.775668 2175536 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:07:14.777251 2175536 ssh_runner.go:195] Run: containerd --version
	I0916 11:07:14.797312 2175536 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:07:14.801130 2175536 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 11:07:14.802981 2175536 cli_runner.go:164] Run: docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:07:14.818100 2175536 ssh_runner.go:195] Run: grep 192.168.58.1	host.minikube.internal$ /etc/hosts
	I0916 11:07:14.821749 2175536 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.58.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:07:14.832523 2175536 kubeadm.go:883] updating cluster {Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APISe
rverNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:fals
e CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:07:14.832641 2175536 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:07:14.832706 2175536 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:07:14.865240 2175536 command_runner.go:130] > {
	I0916 11:07:14.865259 2175536 command_runner.go:130] >   "images": [
	I0916 11:07:14.865263 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865273 2175536 command_runner.go:130] >       "id": "sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:07:14.865278 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865286 2175536 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:07:14.865290 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865294 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865303 2175536 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:07:14.865306 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865310 2175536 command_runner.go:130] >       "size": "33309097",
	I0916 11:07:14.865314 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.865317 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865321 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865324 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865332 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865335 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865345 2175536 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:07:14.865349 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865354 2175536 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:07:14.865357 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865361 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865369 2175536 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:07:14.865372 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865376 2175536 command_runner.go:130] >       "size": "8034419",
	I0916 11:07:14.865379 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.865383 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865386 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865390 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865393 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865395 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865402 2175536 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:07:14.865406 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865413 2175536 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:07:14.865417 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865420 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865428 2175536 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:07:14.865431 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865435 2175536 command_runner.go:130] >       "size": "16948420",
	I0916 11:07:14.865438 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.865442 2175536 command_runner.go:130] >       "username": "nonroot",
	I0916 11:07:14.865446 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865450 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865453 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865456 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865462 2175536 command_runner.go:130] >       "id": "sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:07:14.865466 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865471 2175536 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:07:14.865478 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865482 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865490 2175536 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a"
	I0916 11:07:14.865493 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865496 2175536 command_runner.go:130] >       "size": "66535646",
	I0916 11:07:14.865500 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.865503 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.865506 2175536 command_runner.go:130] >       },
	I0916 11:07:14.865510 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865513 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865517 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865519 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865522 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865529 2175536 command_runner.go:130] >       "id": "sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:07:14.865532 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865537 2175536 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:07:14.865540 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865544 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865556 2175536 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb"
	I0916 11:07:14.865559 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865562 2175536 command_runner.go:130] >       "size": "25687130",
	I0916 11:07:14.865566 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.865569 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.865572 2175536 command_runner.go:130] >       },
	I0916 11:07:14.865576 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865579 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865583 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865585 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865588 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865595 2175536 command_runner.go:130] >       "id": "sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:07:14.865598 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865603 2175536 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:07:14.865606 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865616 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865624 2175536 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1"
	I0916 11:07:14.865628 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865631 2175536 command_runner.go:130] >       "size": "23948670",
	I0916 11:07:14.865635 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.865638 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.865641 2175536 command_runner.go:130] >       },
	I0916 11:07:14.865644 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865650 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865654 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865656 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865659 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865666 2175536 command_runner.go:130] >       "id": "sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:07:14.865669 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865674 2175536 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:07:14.865679 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865682 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865690 2175536 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44"
	I0916 11:07:14.865693 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865697 2175536 command_runner.go:130] >       "size": "26756812",
	I0916 11:07:14.865700 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.865703 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865707 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865710 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865713 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865716 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865722 2175536 command_runner.go:130] >       "id": "sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:07:14.865726 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865731 2175536 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:07:14.865734 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865737 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865745 2175536 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:07:14.865749 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865756 2175536 command_runner.go:130] >       "size": "18507674",
	I0916 11:07:14.865759 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.865763 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.865766 2175536 command_runner.go:130] >       },
	I0916 11:07:14.865769 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865772 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865776 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.865779 2175536 command_runner.go:130] >     },
	I0916 11:07:14.865782 2175536 command_runner.go:130] >     {
	I0916 11:07:14.865788 2175536 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:07:14.865792 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.865796 2175536 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:07:14.865799 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865802 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.865810 2175536 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:07:14.865813 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.865816 2175536 command_runner.go:130] >       "size": "267933",
	I0916 11:07:14.865820 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.865824 2175536 command_runner.go:130] >         "value": "65535"
	I0916 11:07:14.865827 2175536 command_runner.go:130] >       },
	I0916 11:07:14.865830 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.865834 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.865837 2175536 command_runner.go:130] >       "pinned": true
	I0916 11:07:14.865840 2175536 command_runner.go:130] >     }
	I0916 11:07:14.865843 2175536 command_runner.go:130] >   ]
	I0916 11:07:14.865846 2175536 command_runner.go:130] > }
	I0916 11:07:14.868316 2175536 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 11:07:14.868340 2175536 containerd.go:534] Images already preloaded, skipping extraction
	I0916 11:07:14.868401 2175536 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:07:14.905234 2175536 command_runner.go:130] > {
	I0916 11:07:14.905255 2175536 command_runner.go:130] >   "images": [
	I0916 11:07:14.905260 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905272 2175536 command_runner.go:130] >       "id": "sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:07:14.905295 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905301 2175536 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:07:14.905304 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905308 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905317 2175536 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:07:14.905321 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905325 2175536 command_runner.go:130] >       "size": "33309097",
	I0916 11:07:14.905329 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.905333 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905337 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905341 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905344 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905348 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905356 2175536 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:07:14.905360 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905365 2175536 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:07:14.905368 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905372 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905381 2175536 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:07:14.905384 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905390 2175536 command_runner.go:130] >       "size": "8034419",
	I0916 11:07:14.905394 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.905399 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905402 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905406 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905412 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905415 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905428 2175536 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:07:14.905467 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905474 2175536 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:07:14.905477 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905483 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905497 2175536 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:07:14.905508 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905512 2175536 command_runner.go:130] >       "size": "16948420",
	I0916 11:07:14.905515 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.905520 2175536 command_runner.go:130] >       "username": "nonroot",
	I0916 11:07:14.905523 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905529 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905532 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905535 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905541 2175536 command_runner.go:130] >       "id": "sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:07:14.905545 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905549 2175536 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:07:14.905554 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905558 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905565 2175536 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a"
	I0916 11:07:14.905568 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905572 2175536 command_runner.go:130] >       "size": "66535646",
	I0916 11:07:14.905575 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.905578 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.905581 2175536 command_runner.go:130] >       },
	I0916 11:07:14.905585 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905588 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905592 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905595 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905598 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905604 2175536 command_runner.go:130] >       "id": "sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:07:14.905608 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905613 2175536 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:07:14.905616 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905620 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905633 2175536 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb"
	I0916 11:07:14.905638 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905642 2175536 command_runner.go:130] >       "size": "25687130",
	I0916 11:07:14.905645 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.905652 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.905655 2175536 command_runner.go:130] >       },
	I0916 11:07:14.905658 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905662 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905665 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905668 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905671 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905677 2175536 command_runner.go:130] >       "id": "sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:07:14.905681 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905686 2175536 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:07:14.905689 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905693 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905701 2175536 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1"
	I0916 11:07:14.905704 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905708 2175536 command_runner.go:130] >       "size": "23948670",
	I0916 11:07:14.905711 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.905714 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.905717 2175536 command_runner.go:130] >       },
	I0916 11:07:14.905721 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905726 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905730 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905732 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905735 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905742 2175536 command_runner.go:130] >       "id": "sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:07:14.905745 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905750 2175536 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:07:14.905753 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905757 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905764 2175536 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44"
	I0916 11:07:14.905767 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905771 2175536 command_runner.go:130] >       "size": "26756812",
	I0916 11:07:14.905774 2175536 command_runner.go:130] >       "uid": null,
	I0916 11:07:14.905778 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905784 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905788 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905791 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905794 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905800 2175536 command_runner.go:130] >       "id": "sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:07:14.905804 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905809 2175536 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:07:14.905812 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905815 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905823 2175536 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:07:14.905826 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905830 2175536 command_runner.go:130] >       "size": "18507674",
	I0916 11:07:14.905833 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.905836 2175536 command_runner.go:130] >         "value": "0"
	I0916 11:07:14.905839 2175536 command_runner.go:130] >       },
	I0916 11:07:14.905843 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905846 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905850 2175536 command_runner.go:130] >       "pinned": false
	I0916 11:07:14.905853 2175536 command_runner.go:130] >     },
	I0916 11:07:14.905856 2175536 command_runner.go:130] >     {
	I0916 11:07:14.905862 2175536 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:07:14.905866 2175536 command_runner.go:130] >       "repoTags": [
	I0916 11:07:14.905870 2175536 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:07:14.905873 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905877 2175536 command_runner.go:130] >       "repoDigests": [
	I0916 11:07:14.905884 2175536 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:07:14.905887 2175536 command_runner.go:130] >       ],
	I0916 11:07:14.905892 2175536 command_runner.go:130] >       "size": "267933",
	I0916 11:07:14.905895 2175536 command_runner.go:130] >       "uid": {
	I0916 11:07:14.905899 2175536 command_runner.go:130] >         "value": "65535"
	I0916 11:07:14.905903 2175536 command_runner.go:130] >       },
	I0916 11:07:14.905907 2175536 command_runner.go:130] >       "username": "",
	I0916 11:07:14.905910 2175536 command_runner.go:130] >       "spec": null,
	I0916 11:07:14.905917 2175536 command_runner.go:130] >       "pinned": true
	I0916 11:07:14.905920 2175536 command_runner.go:130] >     }
	I0916 11:07:14.905923 2175536 command_runner.go:130] >   ]
	I0916 11:07:14.905927 2175536 command_runner.go:130] > }
	I0916 11:07:14.908255 2175536 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 11:07:14.908275 2175536 cache_images.go:84] Images are preloaded, skipping loading
	I0916 11:07:14.908283 2175536 kubeadm.go:934] updating node { 192.168.58.2 8443 v1.31.1 containerd true true} ...
	I0916 11:07:14.908389 2175536 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-890146 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.58.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:07:14.908461 2175536 ssh_runner.go:195] Run: sudo crictl info
	I0916 11:07:14.944258 2175536 command_runner.go:130] > {
	I0916 11:07:14.944281 2175536 command_runner.go:130] >   "status": {
	I0916 11:07:14.944287 2175536 command_runner.go:130] >     "conditions": [
	I0916 11:07:14.944291 2175536 command_runner.go:130] >       {
	I0916 11:07:14.944296 2175536 command_runner.go:130] >         "type": "RuntimeReady",
	I0916 11:07:14.944300 2175536 command_runner.go:130] >         "status": true,
	I0916 11:07:14.944304 2175536 command_runner.go:130] >         "reason": "",
	I0916 11:07:14.944309 2175536 command_runner.go:130] >         "message": ""
	I0916 11:07:14.944315 2175536 command_runner.go:130] >       },
	I0916 11:07:14.944318 2175536 command_runner.go:130] >       {
	I0916 11:07:14.944323 2175536 command_runner.go:130] >         "type": "NetworkReady",
	I0916 11:07:14.944326 2175536 command_runner.go:130] >         "status": true,
	I0916 11:07:14.944330 2175536 command_runner.go:130] >         "reason": "",
	I0916 11:07:14.944335 2175536 command_runner.go:130] >         "message": ""
	I0916 11:07:14.944343 2175536 command_runner.go:130] >       },
	I0916 11:07:14.944347 2175536 command_runner.go:130] >       {
	I0916 11:07:14.944352 2175536 command_runner.go:130] >         "type": "ContainerdHasNoDeprecationWarnings",
	I0916 11:07:14.944356 2175536 command_runner.go:130] >         "status": true,
	I0916 11:07:14.944360 2175536 command_runner.go:130] >         "reason": "",
	I0916 11:07:14.944366 2175536 command_runner.go:130] >         "message": ""
	I0916 11:07:14.944369 2175536 command_runner.go:130] >       }
	I0916 11:07:14.944372 2175536 command_runner.go:130] >     ]
	I0916 11:07:14.944375 2175536 command_runner.go:130] >   },
	I0916 11:07:14.944379 2175536 command_runner.go:130] >   "cniconfig": {
	I0916 11:07:14.944384 2175536 command_runner.go:130] >     "PluginDirs": [
	I0916 11:07:14.944388 2175536 command_runner.go:130] >       "/opt/cni/bin"
	I0916 11:07:14.944396 2175536 command_runner.go:130] >     ],
	I0916 11:07:14.944404 2175536 command_runner.go:130] >     "PluginConfDir": "/etc/cni/net.d",
	I0916 11:07:14.944411 2175536 command_runner.go:130] >     "PluginMaxConfNum": 1,
	I0916 11:07:14.944414 2175536 command_runner.go:130] >     "Prefix": "eth",
	I0916 11:07:14.944418 2175536 command_runner.go:130] >     "Networks": [
	I0916 11:07:14.944421 2175536 command_runner.go:130] >       {
	I0916 11:07:14.944424 2175536 command_runner.go:130] >         "Config": {
	I0916 11:07:14.944428 2175536 command_runner.go:130] >           "Name": "cni-loopback",
	I0916 11:07:14.944432 2175536 command_runner.go:130] >           "CNIVersion": "0.3.1",
	I0916 11:07:14.944436 2175536 command_runner.go:130] >           "Plugins": [
	I0916 11:07:14.944439 2175536 command_runner.go:130] >             {
	I0916 11:07:14.944443 2175536 command_runner.go:130] >               "Network": {
	I0916 11:07:14.944447 2175536 command_runner.go:130] >                 "type": "loopback",
	I0916 11:07:14.944453 2175536 command_runner.go:130] >                 "ipam": {},
	I0916 11:07:14.944462 2175536 command_runner.go:130] >                 "dns": {}
	I0916 11:07:14.944466 2175536 command_runner.go:130] >               },
	I0916 11:07:14.944471 2175536 command_runner.go:130] >               "Source": "{\"type\":\"loopback\"}"
	I0916 11:07:14.944482 2175536 command_runner.go:130] >             }
	I0916 11:07:14.944485 2175536 command_runner.go:130] >           ],
	I0916 11:07:14.944498 2175536 command_runner.go:130] >           "Source": "{\n\"cniVersion\": \"0.3.1\",\n\"name\": \"cni-loopback\",\n\"plugins\": [{\n  \"type\": \"loopback\"\n}]\n}"
	I0916 11:07:14.944504 2175536 command_runner.go:130] >         },
	I0916 11:07:14.944508 2175536 command_runner.go:130] >         "IFName": "lo"
	I0916 11:07:14.944511 2175536 command_runner.go:130] >       },
	I0916 11:07:14.944514 2175536 command_runner.go:130] >       {
	I0916 11:07:14.944517 2175536 command_runner.go:130] >         "Config": {
	I0916 11:07:14.944522 2175536 command_runner.go:130] >           "Name": "loopback",
	I0916 11:07:14.944526 2175536 command_runner.go:130] >           "CNIVersion": "1.0.0",
	I0916 11:07:14.944533 2175536 command_runner.go:130] >           "Plugins": [
	I0916 11:07:14.944537 2175536 command_runner.go:130] >             {
	I0916 11:07:14.944541 2175536 command_runner.go:130] >               "Network": {
	I0916 11:07:14.944547 2175536 command_runner.go:130] >                 "cniVersion": "1.0.0",
	I0916 11:07:14.944552 2175536 command_runner.go:130] >                 "name": "loopback",
	I0916 11:07:14.944556 2175536 command_runner.go:130] >                 "type": "loopback",
	I0916 11:07:14.944572 2175536 command_runner.go:130] >                 "ipam": {},
	I0916 11:07:14.944578 2175536 command_runner.go:130] >                 "dns": {}
	I0916 11:07:14.944583 2175536 command_runner.go:130] >               },
	I0916 11:07:14.944591 2175536 command_runner.go:130] >               "Source": "{\"cniVersion\":\"1.0.0\",\"name\":\"loopback\",\"type\":\"loopback\"}"
	I0916 11:07:14.944595 2175536 command_runner.go:130] >             }
	I0916 11:07:14.944598 2175536 command_runner.go:130] >           ],
	I0916 11:07:14.944608 2175536 command_runner.go:130] >           "Source": "{\"cniVersion\":\"1.0.0\",\"name\":\"loopback\",\"plugins\":[{\"cniVersion\":\"1.0.0\",\"name\":\"loopback\",\"type\":\"loopback\"}]}"
	I0916 11:07:14.944612 2175536 command_runner.go:130] >         },
	I0916 11:07:14.944616 2175536 command_runner.go:130] >         "IFName": "eth0"
	I0916 11:07:14.944622 2175536 command_runner.go:130] >       }
	I0916 11:07:14.944625 2175536 command_runner.go:130] >     ]
	I0916 11:07:14.944628 2175536 command_runner.go:130] >   },
	I0916 11:07:14.944631 2175536 command_runner.go:130] >   "config": {
	I0916 11:07:14.944635 2175536 command_runner.go:130] >     "containerd": {
	I0916 11:07:14.944642 2175536 command_runner.go:130] >       "snapshotter": "overlayfs",
	I0916 11:07:14.944647 2175536 command_runner.go:130] >       "defaultRuntimeName": "runc",
	I0916 11:07:14.944659 2175536 command_runner.go:130] >       "defaultRuntime": {
	I0916 11:07:14.944663 2175536 command_runner.go:130] >         "runtimeType": "",
	I0916 11:07:14.944668 2175536 command_runner.go:130] >         "runtimePath": "",
	I0916 11:07:14.944673 2175536 command_runner.go:130] >         "runtimeEngine": "",
	I0916 11:07:14.944677 2175536 command_runner.go:130] >         "PodAnnotations": null,
	I0916 11:07:14.944687 2175536 command_runner.go:130] >         "ContainerAnnotations": null,
	I0916 11:07:14.944697 2175536 command_runner.go:130] >         "runtimeRoot": "",
	I0916 11:07:14.944701 2175536 command_runner.go:130] >         "options": null,
	I0916 11:07:14.944706 2175536 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0916 11:07:14.944715 2175536 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0916 11:07:14.944723 2175536 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0916 11:07:14.944727 2175536 command_runner.go:130] >         "cniConfDir": "",
	I0916 11:07:14.944732 2175536 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0916 11:07:14.944739 2175536 command_runner.go:130] >         "snapshotter": "",
	I0916 11:07:14.944743 2175536 command_runner.go:130] >         "sandboxMode": ""
	I0916 11:07:14.944746 2175536 command_runner.go:130] >       },
	I0916 11:07:14.944756 2175536 command_runner.go:130] >       "untrustedWorkloadRuntime": {
	I0916 11:07:14.944765 2175536 command_runner.go:130] >         "runtimeType": "",
	I0916 11:07:14.944771 2175536 command_runner.go:130] >         "runtimePath": "",
	I0916 11:07:14.944776 2175536 command_runner.go:130] >         "runtimeEngine": "",
	I0916 11:07:14.944780 2175536 command_runner.go:130] >         "PodAnnotations": null,
	I0916 11:07:14.944784 2175536 command_runner.go:130] >         "ContainerAnnotations": null,
	I0916 11:07:14.944788 2175536 command_runner.go:130] >         "runtimeRoot": "",
	I0916 11:07:14.944792 2175536 command_runner.go:130] >         "options": null,
	I0916 11:07:14.944797 2175536 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0916 11:07:14.944803 2175536 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0916 11:07:14.944810 2175536 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0916 11:07:14.944814 2175536 command_runner.go:130] >         "cniConfDir": "",
	I0916 11:07:14.944818 2175536 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0916 11:07:14.944824 2175536 command_runner.go:130] >         "snapshotter": "",
	I0916 11:07:14.944830 2175536 command_runner.go:130] >         "sandboxMode": ""
	I0916 11:07:14.944833 2175536 command_runner.go:130] >       },
	I0916 11:07:14.944841 2175536 command_runner.go:130] >       "runtimes": {
	I0916 11:07:14.944847 2175536 command_runner.go:130] >         "runc": {
	I0916 11:07:14.944851 2175536 command_runner.go:130] >           "runtimeType": "io.containerd.runc.v2",
	I0916 11:07:14.944858 2175536 command_runner.go:130] >           "runtimePath": "",
	I0916 11:07:14.944864 2175536 command_runner.go:130] >           "runtimeEngine": "",
	I0916 11:07:14.944868 2175536 command_runner.go:130] >           "PodAnnotations": null,
	I0916 11:07:14.944873 2175536 command_runner.go:130] >           "ContainerAnnotations": null,
	I0916 11:07:14.944877 2175536 command_runner.go:130] >           "runtimeRoot": "",
	I0916 11:07:14.944881 2175536 command_runner.go:130] >           "options": {
	I0916 11:07:14.944885 2175536 command_runner.go:130] >             "SystemdCgroup": false
	I0916 11:07:14.944891 2175536 command_runner.go:130] >           },
	I0916 11:07:14.944901 2175536 command_runner.go:130] >           "privileged_without_host_devices": false,
	I0916 11:07:14.944909 2175536 command_runner.go:130] >           "privileged_without_host_devices_all_devices_allowed": false,
	I0916 11:07:14.944913 2175536 command_runner.go:130] >           "baseRuntimeSpec": "",
	I0916 11:07:14.944917 2175536 command_runner.go:130] >           "cniConfDir": "",
	I0916 11:07:14.944923 2175536 command_runner.go:130] >           "cniMaxConfNum": 0,
	I0916 11:07:14.944927 2175536 command_runner.go:130] >           "snapshotter": "",
	I0916 11:07:14.944935 2175536 command_runner.go:130] >           "sandboxMode": "podsandbox"
	I0916 11:07:14.944947 2175536 command_runner.go:130] >         }
	I0916 11:07:14.944953 2175536 command_runner.go:130] >       },
	I0916 11:07:14.944957 2175536 command_runner.go:130] >       "noPivot": false,
	I0916 11:07:14.944963 2175536 command_runner.go:130] >       "disableSnapshotAnnotations": true,
	I0916 11:07:14.944968 2175536 command_runner.go:130] >       "discardUnpackedLayers": true,
	I0916 11:07:14.944977 2175536 command_runner.go:130] >       "ignoreBlockIONotEnabledErrors": false,
	I0916 11:07:14.944985 2175536 command_runner.go:130] >       "ignoreRdtNotEnabledErrors": false
	I0916 11:07:14.944987 2175536 command_runner.go:130] >     },
	I0916 11:07:14.944993 2175536 command_runner.go:130] >     "cni": {
	I0916 11:07:14.944997 2175536 command_runner.go:130] >       "binDir": "/opt/cni/bin",
	I0916 11:07:14.945002 2175536 command_runner.go:130] >       "confDir": "/etc/cni/net.d",
	I0916 11:07:14.945006 2175536 command_runner.go:130] >       "maxConfNum": 1,
	I0916 11:07:14.945013 2175536 command_runner.go:130] >       "setupSerially": false,
	I0916 11:07:14.945022 2175536 command_runner.go:130] >       "confTemplate": "",
	I0916 11:07:14.945026 2175536 command_runner.go:130] >       "ipPref": ""
	I0916 11:07:14.945032 2175536 command_runner.go:130] >     },
	I0916 11:07:14.945035 2175536 command_runner.go:130] >     "registry": {
	I0916 11:07:14.945045 2175536 command_runner.go:130] >       "configPath": "/etc/containerd/certs.d",
	I0916 11:07:14.945051 2175536 command_runner.go:130] >       "mirrors": null,
	I0916 11:07:14.945055 2175536 command_runner.go:130] >       "configs": null,
	I0916 11:07:14.945061 2175536 command_runner.go:130] >       "auths": null,
	I0916 11:07:14.945064 2175536 command_runner.go:130] >       "headers": null
	I0916 11:07:14.945070 2175536 command_runner.go:130] >     },
	I0916 11:07:14.945074 2175536 command_runner.go:130] >     "imageDecryption": {
	I0916 11:07:14.945090 2175536 command_runner.go:130] >       "keyModel": "node"
	I0916 11:07:14.945110 2175536 command_runner.go:130] >     },
	I0916 11:07:14.945117 2175536 command_runner.go:130] >     "disableTCPService": true,
	I0916 11:07:14.945122 2175536 command_runner.go:130] >     "streamServerAddress": "",
	I0916 11:07:14.945128 2175536 command_runner.go:130] >     "streamServerPort": "10010",
	I0916 11:07:14.945133 2175536 command_runner.go:130] >     "streamIdleTimeout": "4h0m0s",
	I0916 11:07:14.945137 2175536 command_runner.go:130] >     "enableSelinux": false,
	I0916 11:07:14.945141 2175536 command_runner.go:130] >     "selinuxCategoryRange": 1024,
	I0916 11:07:14.945148 2175536 command_runner.go:130] >     "sandboxImage": "registry.k8s.io/pause:3.10",
	I0916 11:07:14.945164 2175536 command_runner.go:130] >     "statsCollectPeriod": 10,
	I0916 11:07:14.945169 2175536 command_runner.go:130] >     "systemdCgroup": false,
	I0916 11:07:14.945173 2175536 command_runner.go:130] >     "enableTLSStreaming": false,
	I0916 11:07:14.945177 2175536 command_runner.go:130] >     "x509KeyPairStreaming": {
	I0916 11:07:14.945188 2175536 command_runner.go:130] >       "tlsCertFile": "",
	I0916 11:07:14.945192 2175536 command_runner.go:130] >       "tlsKeyFile": ""
	I0916 11:07:14.945194 2175536 command_runner.go:130] >     },
	I0916 11:07:14.945199 2175536 command_runner.go:130] >     "maxContainerLogSize": 16384,
	I0916 11:07:14.945202 2175536 command_runner.go:130] >     "disableCgroup": false,
	I0916 11:07:14.945206 2175536 command_runner.go:130] >     "disableApparmor": false,
	I0916 11:07:14.945210 2175536 command_runner.go:130] >     "restrictOOMScoreAdj": false,
	I0916 11:07:14.945214 2175536 command_runner.go:130] >     "maxConcurrentDownloads": 3,
	I0916 11:07:14.945219 2175536 command_runner.go:130] >     "disableProcMount": false,
	I0916 11:07:14.945222 2175536 command_runner.go:130] >     "unsetSeccompProfile": "",
	I0916 11:07:14.945227 2175536 command_runner.go:130] >     "tolerateMissingHugetlbController": true,
	I0916 11:07:14.945232 2175536 command_runner.go:130] >     "disableHugetlbController": true,
	I0916 11:07:14.945237 2175536 command_runner.go:130] >     "device_ownership_from_security_context": false,
	I0916 11:07:14.945242 2175536 command_runner.go:130] >     "ignoreImageDefinedVolumes": false,
	I0916 11:07:14.945249 2175536 command_runner.go:130] >     "netnsMountsUnderStateDir": false,
	I0916 11:07:14.945253 2175536 command_runner.go:130] >     "enableUnprivilegedPorts": true,
	I0916 11:07:14.945258 2175536 command_runner.go:130] >     "enableUnprivilegedICMP": false,
	I0916 11:07:14.945264 2175536 command_runner.go:130] >     "enableCDI": false,
	I0916 11:07:14.945267 2175536 command_runner.go:130] >     "cdiSpecDirs": [
	I0916 11:07:14.945271 2175536 command_runner.go:130] >       "/etc/cdi",
	I0916 11:07:14.945274 2175536 command_runner.go:130] >       "/var/run/cdi"
	I0916 11:07:14.945278 2175536 command_runner.go:130] >     ],
	I0916 11:07:14.945282 2175536 command_runner.go:130] >     "imagePullProgressTimeout": "5m0s",
	I0916 11:07:14.945287 2175536 command_runner.go:130] >     "drainExecSyncIOTimeout": "0s",
	I0916 11:07:14.945294 2175536 command_runner.go:130] >     "imagePullWithSyncFs": false,
	I0916 11:07:14.945299 2175536 command_runner.go:130] >     "ignoreDeprecationWarnings": null,
	I0916 11:07:14.945304 2175536 command_runner.go:130] >     "containerdRootDir": "/var/lib/containerd",
	I0916 11:07:14.945309 2175536 command_runner.go:130] >     "containerdEndpoint": "/run/containerd/containerd.sock",
	I0916 11:07:14.945315 2175536 command_runner.go:130] >     "rootDir": "/var/lib/containerd/io.containerd.grpc.v1.cri",
	I0916 11:07:14.945324 2175536 command_runner.go:130] >     "stateDir": "/run/containerd/io.containerd.grpc.v1.cri"
	I0916 11:07:14.945329 2175536 command_runner.go:130] >   },
	I0916 11:07:14.945333 2175536 command_runner.go:130] >   "golang": "go1.22.7",
	I0916 11:07:14.945337 2175536 command_runner.go:130] >   "lastCNILoadStatus": "OK",
	I0916 11:07:14.945348 2175536 command_runner.go:130] >   "lastCNILoadStatus.default": "OK"
	I0916 11:07:14.945351 2175536 command_runner.go:130] > }
	I0916 11:07:14.948488 2175536 cni.go:84] Creating CNI manager for ""
	I0916 11:07:14.948515 2175536 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 11:07:14.948525 2175536 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:07:14.948549 2175536 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.58.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-890146 NodeName:multinode-890146 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.58.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.58.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPat
h:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:07:14.948685 2175536 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.58.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "multinode-890146"
	  kubeletExtraArgs:
	    node-ip: 192.168.58.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.58.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 11:07:14.948763 2175536 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:07:14.957220 2175536 command_runner.go:130] > kubeadm
	I0916 11:07:14.957295 2175536 command_runner.go:130] > kubectl
	I0916 11:07:14.957315 2175536 command_runner.go:130] > kubelet
	I0916 11:07:14.958383 2175536 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:07:14.958446 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 11:07:14.968059 2175536 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (320 bytes)
	I0916 11:07:14.986524 2175536 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:07:15.027083 2175536 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2170 bytes)
	I0916 11:07:15.049929 2175536 ssh_runner.go:195] Run: grep 192.168.58.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:07:15.054347 2175536 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.58.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:07:15.066765 2175536 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:07:15.161114 2175536 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:07:15.178638 2175536 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146 for IP: 192.168.58.2
	I0916 11:07:15.178659 2175536 certs.go:194] generating shared ca certs ...
	I0916 11:07:15.178685 2175536 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:15.178881 2175536 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 11:07:15.178956 2175536 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 11:07:15.178970 2175536 certs.go:256] generating profile certs ...
	I0916 11:07:15.179046 2175536 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key
	I0916 11:07:15.179064 2175536 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt with IP's: []
	I0916 11:07:15.598940 2175536 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt ...
	I0916 11:07:15.598971 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt: {Name:mk7ab98d016c599af820ddb3ea5f73c56de76d66 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:15.599197 2175536 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key ...
	I0916 11:07:15.599213 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key: {Name:mkca0d412ad6145c6ef5271650396cc573c31df2 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:15.599309 2175536 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key.cd70a0e7
	I0916 11:07:15.599325 2175536 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt.cd70a0e7 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.58.2]
	I0916 11:07:16.019953 2175536 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt.cd70a0e7 ...
	I0916 11:07:16.019989 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt.cd70a0e7: {Name:mk95dd49b821f230f6c0530baed7f4bfdf8c60ac Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:16.020222 2175536 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key.cd70a0e7 ...
	I0916 11:07:16.020240 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key.cd70a0e7: {Name:mkbb51a1e4c459f22a404891e38caed47b3772c4 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:16.020343 2175536 certs.go:381] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt.cd70a0e7 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt
	I0916 11:07:16.020436 2175536 certs.go:385] copying /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key.cd70a0e7 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key
	I0916 11:07:16.020508 2175536 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key
	I0916 11:07:16.020529 2175536 crypto.go:68] Generating cert /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt with IP's: []
	I0916 11:07:16.825523 2175536 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt ...
	I0916 11:07:16.825555 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt: {Name:mka686c367fd92977dbf5d5c98408fe58e5a5937 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:16.825732 2175536 crypto.go:164] Writing key to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key ...
	I0916 11:07:16.825747 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key: {Name:mkce6bfd8cfba42678bcd3dc3e58f7a0a170a962 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:16.825852 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:07:16.825877 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:07:16.825893 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:07:16.825912 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:07:16.825923 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 11:07:16.825939 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 11:07:16.825950 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 11:07:16.825960 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 11:07:16.826017 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 11:07:16.826057 2175536 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 11:07:16.826068 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:07:16.826093 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 11:07:16.826121 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:07:16.826146 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 11:07:16.826190 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:07:16.826222 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:07:16.826243 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 11:07:16.826254 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 11:07:16.826850 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:07:16.868216 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 11:07:16.895839 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:07:16.921370 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 11:07:16.948177 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 11:07:16.973606 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 11:07:16.998400 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 11:07:17.025704 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 11:07:17.051525 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:07:17.078973 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 11:07:17.103671 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 11:07:17.128356 2175536 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 11:07:17.146818 2175536 ssh_runner.go:195] Run: openssl version
	I0916 11:07:17.152096 2175536 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:07:17.152534 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 11:07:17.162304 2175536 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 11:07:17.165902 2175536 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:07:17.165941 2175536 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:07:17.165992 2175536 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 11:07:17.172790 2175536 command_runner.go:130] > 51391683
	I0916 11:07:17.173264 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 11:07:17.182982 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 11:07:17.192748 2175536 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 11:07:17.196241 2175536 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:07:17.196278 2175536 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:07:17.196352 2175536 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 11:07:17.203077 2175536 command_runner.go:130] > 3ec20f2e
	I0916 11:07:17.203524 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:07:17.213254 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:07:17.223233 2175536 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:07:17.227103 2175536 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:07:17.227139 2175536 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:07:17.227200 2175536 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:07:17.233935 2175536 command_runner.go:130] > b5213941
	I0916 11:07:17.234440 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:07:17.244114 2175536 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:07:17.247546 2175536 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:07:17.247583 2175536 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:07:17.247622 2175536 kubeadm.go:392] StartCluster: {Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServe
rNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false C
ustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:07:17.247709 2175536 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 11:07:17.247773 2175536 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 11:07:17.284532 2175536 cri.go:89] found id: ""
	I0916 11:07:17.284626 2175536 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 11:07:17.293414 2175536 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/kubeadm-flags.env': No such file or directory
	I0916 11:07:17.293441 2175536 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/config.yaml': No such file or directory
	I0916 11:07:17.293449 2175536 command_runner.go:130] ! ls: cannot access '/var/lib/minikube/etcd': No such file or directory
	I0916 11:07:17.293513 2175536 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0916 11:07:17.302186 2175536 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0916 11:07:17.302252 2175536 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0916 11:07:17.310213 2175536 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	I0916 11:07:17.310239 2175536 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	I0916 11:07:17.310247 2175536 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	I0916 11:07:17.310256 2175536 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 11:07:17.311438 2175536 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0916 11:07:17.311459 2175536 kubeadm.go:157] found existing configuration files:
	
	I0916 11:07:17.311542 2175536 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0916 11:07:17.320590 2175536 command_runner.go:130] ! grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 11:07:17.320663 2175536 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0916 11:07:17.320751 2175536 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0916 11:07:17.329389 2175536 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0916 11:07:17.338451 2175536 command_runner.go:130] ! grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 11:07:17.338499 2175536 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0916 11:07:17.338564 2175536 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0916 11:07:17.347101 2175536 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0916 11:07:17.355757 2175536 command_runner.go:130] ! grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 11:07:17.355806 2175536 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0916 11:07:17.355886 2175536 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0916 11:07:17.364598 2175536 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0916 11:07:17.373592 2175536 command_runner.go:130] ! grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 11:07:17.373644 2175536 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0916 11:07:17.373702 2175536 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0916 11:07:17.382440 2175536 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0916 11:07:17.424574 2175536 kubeadm.go:310] [init] Using Kubernetes version: v1.31.1
	I0916 11:07:17.424604 2175536 command_runner.go:130] > [init] Using Kubernetes version: v1.31.1
	I0916 11:07:17.424647 2175536 kubeadm.go:310] [preflight] Running pre-flight checks
	I0916 11:07:17.424657 2175536 command_runner.go:130] > [preflight] Running pre-flight checks
	I0916 11:07:17.444730 2175536 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:07:17.444785 2175536 command_runner.go:130] > [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:07:17.444841 2175536 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:07:17.444851 2175536 command_runner.go:130] > KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:07:17.444884 2175536 kubeadm.go:310] OS: Linux
	I0916 11:07:17.444892 2175536 command_runner.go:130] > OS: Linux
	I0916 11:07:17.444936 2175536 kubeadm.go:310] CGROUPS_CPU: enabled
	I0916 11:07:17.444944 2175536 command_runner.go:130] > CGROUPS_CPU: enabled
	I0916 11:07:17.444990 2175536 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0916 11:07:17.444997 2175536 command_runner.go:130] > CGROUPS_CPUACCT: enabled
	I0916 11:07:17.445043 2175536 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0916 11:07:17.445051 2175536 command_runner.go:130] > CGROUPS_CPUSET: enabled
	I0916 11:07:17.445097 2175536 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0916 11:07:17.445105 2175536 command_runner.go:130] > CGROUPS_DEVICES: enabled
	I0916 11:07:17.445152 2175536 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0916 11:07:17.445170 2175536 command_runner.go:130] > CGROUPS_FREEZER: enabled
	I0916 11:07:17.445220 2175536 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0916 11:07:17.445227 2175536 command_runner.go:130] > CGROUPS_MEMORY: enabled
	I0916 11:07:17.445271 2175536 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0916 11:07:17.445278 2175536 command_runner.go:130] > CGROUPS_PIDS: enabled
	I0916 11:07:17.445324 2175536 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0916 11:07:17.445331 2175536 command_runner.go:130] > CGROUPS_HUGETLB: enabled
	I0916 11:07:17.445376 2175536 kubeadm.go:310] CGROUPS_BLKIO: enabled
	I0916 11:07:17.445383 2175536 command_runner.go:130] > CGROUPS_BLKIO: enabled
	I0916 11:07:17.522659 2175536 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 11:07:17.522702 2175536 command_runner.go:130] > [preflight] Pulling images required for setting up a Kubernetes cluster
	I0916 11:07:17.522798 2175536 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 11:07:17.522808 2175536 command_runner.go:130] > [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0916 11:07:17.522897 2175536 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 11:07:17.522905 2175536 command_runner.go:130] > [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0916 11:07:17.531901 2175536 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 11:07:17.532135 2175536 command_runner.go:130] > [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0916 11:07:17.535785 2175536 out.go:235]   - Generating certificates and keys ...
	I0916 11:07:17.535897 2175536 command_runner.go:130] > [certs] Using existing ca certificate authority
	I0916 11:07:17.535912 2175536 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0916 11:07:17.535978 2175536 command_runner.go:130] > [certs] Using existing apiserver certificate and key on disk
	I0916 11:07:17.535987 2175536 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0916 11:07:17.784579 2175536 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 11:07:17.784609 2175536 command_runner.go:130] > [certs] Generating "apiserver-kubelet-client" certificate and key
	I0916 11:07:19.375182 2175536 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0916 11:07:19.375210 2175536 command_runner.go:130] > [certs] Generating "front-proxy-ca" certificate and key
	I0916 11:07:19.850362 2175536 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0916 11:07:19.850385 2175536 command_runner.go:130] > [certs] Generating "front-proxy-client" certificate and key
	I0916 11:07:20.229756 2175536 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0916 11:07:20.229785 2175536 command_runner.go:130] > [certs] Generating "etcd/ca" certificate and key
	I0916 11:07:20.759185 2175536 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0916 11:07:20.759217 2175536 command_runner.go:130] > [certs] Generating "etcd/server" certificate and key
	I0916 11:07:20.759502 2175536 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [localhost multinode-890146] and IPs [192.168.58.2 127.0.0.1 ::1]
	I0916 11:07:20.759518 2175536 command_runner.go:130] > [certs] etcd/server serving cert is signed for DNS names [localhost multinode-890146] and IPs [192.168.58.2 127.0.0.1 ::1]
	I0916 11:07:21.279131 2175536 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0916 11:07:21.279162 2175536 command_runner.go:130] > [certs] Generating "etcd/peer" certificate and key
	I0916 11:07:21.279291 2175536 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-890146] and IPs [192.168.58.2 127.0.0.1 ::1]
	I0916 11:07:21.279305 2175536 command_runner.go:130] > [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-890146] and IPs [192.168.58.2 127.0.0.1 ::1]
	I0916 11:07:21.571891 2175536 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 11:07:21.571918 2175536 command_runner.go:130] > [certs] Generating "etcd/healthcheck-client" certificate and key
	I0916 11:07:22.084709 2175536 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 11:07:22.084737 2175536 command_runner.go:130] > [certs] Generating "apiserver-etcd-client" certificate and key
	I0916 11:07:22.479391 2175536 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0916 11:07:22.479417 2175536 command_runner.go:130] > [certs] Generating "sa" key and public key
	I0916 11:07:22.479682 2175536 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 11:07:22.479694 2175536 command_runner.go:130] > [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0916 11:07:23.087957 2175536 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 11:07:23.087993 2175536 command_runner.go:130] > [kubeconfig] Writing "admin.conf" kubeconfig file
	I0916 11:07:23.399549 2175536 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 11:07:23.399574 2175536 command_runner.go:130] > [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0916 11:07:23.772170 2175536 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 11:07:23.772197 2175536 command_runner.go:130] > [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0916 11:07:24.211547 2175536 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 11:07:24.211573 2175536 command_runner.go:130] > [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0916 11:07:24.459970 2175536 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 11:07:24.459995 2175536 command_runner.go:130] > [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0916 11:07:24.460618 2175536 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 11:07:24.460636 2175536 command_runner.go:130] > [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0916 11:07:24.464402 2175536 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 11:07:24.464578 2175536 command_runner.go:130] > [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0916 11:07:24.467079 2175536 out.go:235]   - Booting up control plane ...
	I0916 11:07:24.467193 2175536 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 11:07:24.467205 2175536 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0916 11:07:24.467583 2175536 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 11:07:24.467605 2175536 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0916 11:07:24.468768 2175536 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 11:07:24.468786 2175536 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0916 11:07:24.479816 2175536 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:07:24.479842 2175536 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:07:24.486177 2175536 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:07:24.486202 2175536 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:07:24.486455 2175536 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0916 11:07:24.486467 2175536 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0916 11:07:24.591102 2175536 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 11:07:24.591127 2175536 command_runner.go:130] > [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0916 11:07:24.591231 2175536 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:07:24.591235 2175536 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:07:25.586751 2175536 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00150665s
	I0916 11:07:25.586777 2175536 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.00150665s
	I0916 11:07:25.586896 2175536 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 11:07:25.586909 2175536 command_runner.go:130] > [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0916 11:07:32.088377 2175536 kubeadm.go:310] [api-check] The API server is healthy after 6.501798255s
	I0916 11:07:32.088402 2175536 command_runner.go:130] > [api-check] The API server is healthy after 6.501798255s
	I0916 11:07:32.112663 2175536 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:07:32.112686 2175536 command_runner.go:130] > [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:07:32.137967 2175536 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:07:32.137990 2175536 command_runner.go:130] > [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:07:32.181676 2175536 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:07:32.181708 2175536 command_runner.go:130] > [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:07:32.181900 2175536 kubeadm.go:310] [mark-control-plane] Marking the node multinode-890146 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:07:32.181906 2175536 command_runner.go:130] > [mark-control-plane] Marking the node multinode-890146 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:07:32.194621 2175536 kubeadm.go:310] [bootstrap-token] Using token: q96uai.z85znounub5fn5uo
	I0916 11:07:32.194796 2175536 command_runner.go:130] > [bootstrap-token] Using token: q96uai.z85znounub5fn5uo
	I0916 11:07:32.196352 2175536 out.go:235]   - Configuring RBAC rules ...
	I0916 11:07:32.196484 2175536 command_runner.go:130] > [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:07:32.196494 2175536 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:07:32.203638 2175536 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:07:32.203663 2175536 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:07:32.218667 2175536 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:07:32.218713 2175536 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:07:32.229888 2175536 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:07:32.229913 2175536 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:07:32.238376 2175536 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:07:32.238401 2175536 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:07:32.247247 2175536 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:07:32.247275 2175536 command_runner.go:130] > [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:07:32.496151 2175536 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:07:32.496178 2175536 command_runner.go:130] > [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:07:32.919854 2175536 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 11:07:32.919880 2175536 command_runner.go:130] > [addons] Applied essential addon: CoreDNS
	I0916 11:07:33.495386 2175536 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 11:07:33.495408 2175536 command_runner.go:130] > [addons] Applied essential addon: kube-proxy
	I0916 11:07:33.496574 2175536 kubeadm.go:310] 
	I0916 11:07:33.496666 2175536 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 11:07:33.496680 2175536 command_runner.go:130] > Your Kubernetes control-plane has initialized successfully!
	I0916 11:07:33.496687 2175536 kubeadm.go:310] 
	I0916 11:07:33.496771 2175536 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 11:07:33.496779 2175536 command_runner.go:130] > To start using your cluster, you need to run the following as a regular user:
	I0916 11:07:33.496784 2175536 kubeadm.go:310] 
	I0916 11:07:33.496810 2175536 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 11:07:33.496817 2175536 command_runner.go:130] >   mkdir -p $HOME/.kube
	I0916 11:07:33.496875 2175536 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:07:33.496892 2175536 command_runner.go:130] >   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:07:33.496965 2175536 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:07:33.496997 2175536 command_runner.go:130] >   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:07:33.497003 2175536 kubeadm.go:310] 
	I0916 11:07:33.497061 2175536 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 11:07:33.497066 2175536 command_runner.go:130] > Alternatively, if you are the root user, you can run:
	I0916 11:07:33.497070 2175536 kubeadm.go:310] 
	I0916 11:07:33.497117 2175536 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:07:33.497122 2175536 command_runner.go:130] >   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:07:33.497126 2175536 kubeadm.go:310] 
	I0916 11:07:33.497177 2175536 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 11:07:33.497181 2175536 command_runner.go:130] > You should now deploy a pod network to the cluster.
	I0916 11:07:33.497254 2175536 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:07:33.497258 2175536 command_runner.go:130] > Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:07:33.497325 2175536 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:07:33.497328 2175536 command_runner.go:130] >   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:07:33.497332 2175536 kubeadm.go:310] 
	I0916 11:07:33.497414 2175536 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:07:33.497418 2175536 command_runner.go:130] > You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:07:33.497494 2175536 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 11:07:33.497498 2175536 command_runner.go:130] > and service account keys on each node and then running the following as root:
	I0916 11:07:33.497502 2175536 kubeadm.go:310] 
	I0916 11:07:33.497584 2175536 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token q96uai.z85znounub5fn5uo \
	I0916 11:07:33.497587 2175536 command_runner.go:130] >   kubeadm join control-plane.minikube.internal:8443 --token q96uai.z85znounub5fn5uo \
	I0916 11:07:33.497688 2175536 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 11:07:33.497692 2175536 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 11:07:33.497712 2175536 kubeadm.go:310] 	--control-plane 
	I0916 11:07:33.497716 2175536 command_runner.go:130] > 	--control-plane 
	I0916 11:07:33.497720 2175536 kubeadm.go:310] 
	I0916 11:07:33.497804 2175536 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:07:33.497808 2175536 command_runner.go:130] > Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:07:33.497812 2175536 kubeadm.go:310] 
	I0916 11:07:33.497893 2175536 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token q96uai.z85znounub5fn5uo \
	I0916 11:07:33.497897 2175536 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token q96uai.z85znounub5fn5uo \
	I0916 11:07:33.497996 2175536 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 11:07:33.498000 2175536 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 11:07:33.502287 2175536 kubeadm.go:310] W0916 11:07:17.421188    1046 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:07:33.502315 2175536 command_runner.go:130] ! W0916 11:07:17.421188    1046 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:07:33.502646 2175536 kubeadm.go:310] W0916 11:07:17.422059    1046 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:07:33.502669 2175536 command_runner.go:130] ! W0916 11:07:17.422059    1046 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:07:33.502941 2175536 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:07:33.502955 2175536 command_runner.go:130] ! 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:07:33.503062 2175536 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:07:33.503072 2175536 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:07:33.503090 2175536 cni.go:84] Creating CNI manager for ""
	I0916 11:07:33.503101 2175536 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0916 11:07:33.505082 2175536 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 11:07:33.506860 2175536 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 11:07:33.510659 2175536 command_runner.go:130] >   File: /opt/cni/bin/portmap
	I0916 11:07:33.510724 2175536 command_runner.go:130] >   Size: 4030506   	Blocks: 7880       IO Block: 4096   regular file
	I0916 11:07:33.510732 2175536 command_runner.go:130] > Device: 3ch/60d	Inode: 1314974     Links: 1
	I0916 11:07:33.510738 2175536 command_runner.go:130] > Access: (0755/-rwxr-xr-x)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:07:33.510746 2175536 command_runner.go:130] > Access: 2023-12-04 16:39:54.000000000 +0000
	I0916 11:07:33.510751 2175536 command_runner.go:130] > Modify: 2023-12-04 16:39:54.000000000 +0000
	I0916 11:07:33.510755 2175536 command_runner.go:130] > Change: 2024-09-16 10:29:57.653995685 +0000
	I0916 11:07:33.510760 2175536 command_runner.go:130] >  Birth: 2024-09-16 10:29:57.597996108 +0000
	I0916 11:07:33.510910 2175536 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 11:07:33.510925 2175536 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 11:07:33.531123 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 11:07:33.781485 2175536 command_runner.go:130] > clusterrole.rbac.authorization.k8s.io/kindnet created
	I0916 11:07:33.789913 2175536 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/kindnet created
	I0916 11:07:33.800337 2175536 command_runner.go:130] > serviceaccount/kindnet created
	I0916 11:07:33.811879 2175536 command_runner.go:130] > daemonset.apps/kindnet created
	I0916 11:07:33.816494 2175536 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 11:07:33.816687 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:33.816774 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-890146 minikube.k8s.io/updated_at=2024_09_16T11_07_33_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=multinode-890146 minikube.k8s.io/primary=true
	I0916 11:07:34.046500 2175536 command_runner.go:130] > node/multinode-890146 labeled
	I0916 11:07:34.057074 2175536 command_runner.go:130] > -16
	I0916 11:07:34.068365 2175536 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/minikube-rbac created
	I0916 11:07:34.074023 2175536 ops.go:34] apiserver oom_adj: -16
	I0916 11:07:34.074134 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:34.178397 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:34.575029 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:34.666377 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:35.074264 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:35.163614 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:35.575071 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:35.661953 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:36.074521 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:36.171090 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:36.574610 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:36.709592 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:37.075111 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:37.169222 2175536 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0916 11:07:37.574790 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:07:37.682452 2175536 command_runner.go:130] > NAME      SECRETS   AGE
	I0916 11:07:37.682475 2175536 command_runner.go:130] > default   0         0s
	I0916 11:07:37.682514 2175536 kubeadm.go:1113] duration metric: took 3.865900406s to wait for elevateKubeSystemPrivileges
	I0916 11:07:37.682530 2175536 kubeadm.go:394] duration metric: took 20.434911561s to StartCluster
	I0916 11:07:37.682548 2175536 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:37.682618 2175536 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:07:37.683362 2175536 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:07:37.683598 2175536 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 11:07:37.683702 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0916 11:07:37.683928 2175536 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:07:37.683976 2175536 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:07:37.684044 2175536 addons.go:69] Setting storage-provisioner=true in profile "multinode-890146"
	I0916 11:07:37.684059 2175536 addons.go:234] Setting addon storage-provisioner=true in "multinode-890146"
	I0916 11:07:37.684085 2175536 host.go:66] Checking if "multinode-890146" exists ...
	I0916 11:07:37.684819 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:07:37.685206 2175536 addons.go:69] Setting default-storageclass=true in profile "multinode-890146"
	I0916 11:07:37.685232 2175536 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "multinode-890146"
	I0916 11:07:37.685494 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:07:37.687827 2175536 out.go:177] * Verifying Kubernetes components...
	I0916 11:07:37.689797 2175536 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:07:37.718484 2175536 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:07:37.720410 2175536 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:07:37.720429 2175536 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 11:07:37.720494 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:37.730359 2175536 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:07:37.730639 2175536 kapi.go:59] client config for multinode-890146: &rest.Config{Host:"https://192.168.58.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:07:37.731502 2175536 addons.go:234] Setting addon default-storageclass=true in "multinode-890146"
	I0916 11:07:37.731532 2175536 host.go:66] Checking if "multinode-890146" exists ...
	I0916 11:07:37.731955 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:07:37.732166 2175536 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 11:07:37.776744 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:37.782305 2175536 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 11:07:37.782325 2175536 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 11:07:37.782387 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:07:37.810615 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:07:37.985465 2175536 command_runner.go:130] > apiVersion: v1
	I0916 11:07:37.985498 2175536 command_runner.go:130] > data:
	I0916 11:07:37.985503 2175536 command_runner.go:130] >   Corefile: |
	I0916 11:07:37.985507 2175536 command_runner.go:130] >     .:53 {
	I0916 11:07:37.985510 2175536 command_runner.go:130] >         errors
	I0916 11:07:37.985515 2175536 command_runner.go:130] >         health {
	I0916 11:07:37.985545 2175536 command_runner.go:130] >            lameduck 5s
	I0916 11:07:37.985556 2175536 command_runner.go:130] >         }
	I0916 11:07:37.985579 2175536 command_runner.go:130] >         ready
	I0916 11:07:37.985592 2175536 command_runner.go:130] >         kubernetes cluster.local in-addr.arpa ip6.arpa {
	I0916 11:07:37.985596 2175536 command_runner.go:130] >            pods insecure
	I0916 11:07:37.985602 2175536 command_runner.go:130] >            fallthrough in-addr.arpa ip6.arpa
	I0916 11:07:37.985621 2175536 command_runner.go:130] >            ttl 30
	I0916 11:07:37.985637 2175536 command_runner.go:130] >         }
	I0916 11:07:37.985655 2175536 command_runner.go:130] >         prometheus :9153
	I0916 11:07:37.985662 2175536 command_runner.go:130] >         forward . /etc/resolv.conf {
	I0916 11:07:37.985677 2175536 command_runner.go:130] >            max_concurrent 1000
	I0916 11:07:37.985680 2175536 command_runner.go:130] >         }
	I0916 11:07:37.985685 2175536 command_runner.go:130] >         cache 30
	I0916 11:07:37.985689 2175536 command_runner.go:130] >         loop
	I0916 11:07:37.985693 2175536 command_runner.go:130] >         reload
	I0916 11:07:37.985696 2175536 command_runner.go:130] >         loadbalance
	I0916 11:07:37.985704 2175536 command_runner.go:130] >     }
	I0916 11:07:37.985708 2175536 command_runner.go:130] > kind: ConfigMap
	I0916 11:07:37.985711 2175536 command_runner.go:130] > metadata:
	I0916 11:07:37.985732 2175536 command_runner.go:130] >   creationTimestamp: "2024-09-16T11:07:32Z"
	I0916 11:07:37.985742 2175536 command_runner.go:130] >   name: coredns
	I0916 11:07:37.985746 2175536 command_runner.go:130] >   namespace: kube-system
	I0916 11:07:37.985750 2175536 command_runner.go:130] >   resourceVersion: "270"
	I0916 11:07:37.985765 2175536 command_runner.go:130] >   uid: 0b2eb92e-2dcd-4757-9f54-c5a717906b13
	I0916 11:07:37.990194 2175536 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:07:37.990418 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.58.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0916 11:07:38.031706 2175536 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 11:07:38.091657 2175536 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:07:38.576844 2175536 command_runner.go:130] > configmap/coredns replaced
	I0916 11:07:38.583409 2175536 start.go:971] {"host.minikube.internal": 192.168.58.1} host record injected into CoreDNS's ConfigMap
	I0916 11:07:38.584195 2175536 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:07:38.584499 2175536 kapi.go:59] client config for multinode-890146: &rest.Config{Host:"https://192.168.58.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:07:38.584953 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0916 11:07:38.583598 2175536 command_runner.go:130] > storageclass.storage.k8s.io/standard created
	I0916 11:07:38.583888 2175536 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:07:38.585114 2175536 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 11:07:38.585141 2175536 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 11:07:38.585199 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/apis/storage.k8s.io/v1/storageclasses
	I0916 11:07:38.585209 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.585217 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.585227 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.585340 2175536 kapi.go:59] client config for multinode-890146: &rest.Config{Host:"https://192.168.58.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:07:38.586158 2175536 node_ready.go:35] waiting up to 6m0s for node "multinode-890146" to be "Ready" ...
	I0916 11:07:38.585027 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.586324 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.586361 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.586307 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:38.586971 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.587013 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.587033 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.614333 2175536 round_trippers.go:574] Response Status: 200 OK in 27 milliseconds
	I0916 11:07:38.614362 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.614370 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.614375 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.614379 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.614383 2175536 round_trippers.go:580]     Content-Length: 291
	I0916 11:07:38.614385 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.614389 2175536 round_trippers.go:580]     Audit-Id: 4a094915-a6d5-4128-9d47-235633d59b72
	I0916 11:07:38.614391 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.614415 2175536 request.go:1351] Response Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"517b28ab-923f-4c66-9f57-81af25d1c992","resourceVersion":"385","creationTimestamp":"2024-09-16T11:07:32Z"},"spec":{"replicas":2},"status":{"replicas":2,"selector":"k8s-app=kube-dns"}}
	I0916 11:07:38.614886 2175536 request.go:1351] Request Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"517b28ab-923f-4c66-9f57-81af25d1c992","resourceVersion":"385","creationTimestamp":"2024-09-16T11:07:32Z"},"spec":{"replicas":1},"status":{"replicas":2,"selector":"k8s-app=kube-dns"}}
	I0916 11:07:38.614950 2175536 round_trippers.go:463] PUT https://192.168.58.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0916 11:07:38.614963 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.614972 2175536 round_trippers.go:473]     Content-Type: application/json
	I0916 11:07:38.614977 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.614981 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.615192 2175536 round_trippers.go:574] Response Status: 200 OK in 28 milliseconds
	I0916 11:07:38.615210 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.615217 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.615223 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.615227 2175536 round_trippers.go:580]     Audit-Id: 859e4c5d-5c9d-471b-8619-3ea6d9bf3407
	I0916 11:07:38.615237 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.615244 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.615247 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.615324 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:38.615959 2175536 node_ready.go:49] node "multinode-890146" has status "Ready":"True"
	I0916 11:07:38.615986 2175536 node_ready.go:38] duration metric: took 29.750107ms for node "multinode-890146" to be "Ready" ...
	I0916 11:07:38.616008 2175536 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:07:38.616114 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:07:38.616128 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.616137 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.616149 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.620217 2175536 round_trippers.go:574] Response Status: 200 OK in 34 milliseconds
	I0916 11:07:38.620246 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.620254 2175536 round_trippers.go:580]     Content-Length: 1273
	I0916 11:07:38.620285 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.620313 2175536 round_trippers.go:580]     Audit-Id: 283c52c3-8695-4b9d-9523-92eb041f0f49
	I0916 11:07:38.620317 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.620320 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.620323 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.620332 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.620384 2175536 request.go:1351] Response Body: {"kind":"StorageClassList","apiVersion":"storage.k8s.io/v1","metadata":{"resourceVersion":"393"},"items":[{"metadata":{"name":"standard","uid":"f44a335b-ae9b-40d9-90b1-db69666be9fa","resourceVersion":"389","creationTimestamp":"2024-09-16T11:07:38Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T11:07:38Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kuberne
tes.io/last-applied-configuration":{},"f:storageclass.kubernetes.io/is- [truncated 249 chars]
	I0916 11:07:38.620828 2175536 request.go:1351] Request Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"f44a335b-ae9b-40d9-90b1-db69666be9fa","resourceVersion":"389","creationTimestamp":"2024-09-16T11:07:38Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T11:07:38Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storageclas
s.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 11:07:38.620911 2175536 round_trippers.go:463] PUT https://192.168.58.2:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0916 11:07:38.620925 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.620933 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.620943 2175536 round_trippers.go:473]     Content-Type: application/json
	I0916 11:07:38.620946 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.630264 2175536 round_trippers.go:574] Response Status: 200 OK in 15 milliseconds
	I0916 11:07:38.630302 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.630311 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.630334 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.630345 2175536 round_trippers.go:580]     Content-Length: 291
	I0916 11:07:38.630349 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.630352 2175536 round_trippers.go:580]     Audit-Id: ca088995-9d42-423f-b4b0-acecc0697bbc
	I0916 11:07:38.630355 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.630358 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.630394 2175536 request.go:1351] Response Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"517b28ab-923f-4c66-9f57-81af25d1c992","resourceVersion":"394","creationTimestamp":"2024-09-16T11:07:32Z"},"spec":{"replicas":1},"status":{"replicas":2,"selector":"k8s-app=kube-dns"}}
	I0916 11:07:38.639251 2175536 round_trippers.go:574] Response Status: 200 OK in 23 milliseconds
	I0916 11:07:38.639287 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.639296 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.639300 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.639303 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.639306 2175536 round_trippers.go:580]     Audit-Id: 1fbdaac5-fe08-4e41-822c-b05050b8ba14
	I0916 11:07:38.639308 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.639311 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.639860 2175536 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"395"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-bb4db","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"3fd53b00-28ef-44ef-8541-097ebc870b2f","resourceVersion":"383","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 61241 chars]
	I0916 11:07:38.642421 2175536 round_trippers.go:574] Response Status: 200 OK in 21 milliseconds
	I0916 11:07:38.642447 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.642455 2175536 round_trippers.go:580]     Audit-Id: 5efc481d-8713-402a-a63f-978981e5a3c5
	I0916 11:07:38.642458 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.642461 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.642464 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.642468 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.642577 2175536 round_trippers.go:580]     Content-Length: 1220
	I0916 11:07:38.642583 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.642628 2175536 request.go:1351] Response Body: {"kind":"StorageClass","apiVersion":"storage.k8s.io/v1","metadata":{"name":"standard","uid":"f44a335b-ae9b-40d9-90b1-db69666be9fa","resourceVersion":"389","creationTimestamp":"2024-09-16T11:07:38Z","labels":{"addonmanager.kubernetes.io/mode":"EnsureExists"},"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"storage.k8s.io/v1\",\"kind\":\"StorageClass\",\"metadata\":{\"annotations\":{\"storageclass.kubernetes.io/is-default-class\":\"true\"},\"labels\":{\"addonmanager.kubernetes.io/mode\":\"EnsureExists\"},\"name\":\"standard\"},\"provisioner\":\"k8s.io/minikube-hostpath\"}\n","storageclass.kubernetes.io/is-default-class":"true"},"managedFields":[{"manager":"kubectl-client-side-apply","operation":"Update","apiVersion":"storage.k8s.io/v1","time":"2024-09-16T11:07:38Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubectl.kubernetes.io/last-applied-configuration":{},"f:storagecla
ss.kubernetes.io/is-default-class":{}},"f:labels":{".":{},"f:addonmanag [truncated 196 chars]
	I0916 11:07:38.645446 2175536 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-bb4db" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:38.645584 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-bb4db
	I0916 11:07:38.645597 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.645606 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.645609 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.648071 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:38.648127 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.648141 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.648148 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.648155 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.648158 2175536 round_trippers.go:580]     Audit-Id: 00050f3a-dd7b-4cf7-9ea7-3e35763656d2
	I0916 11:07:38.648161 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.648170 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.648380 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-bb4db","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"3fd53b00-28ef-44ef-8541-097ebc870b2f","resourceVersion":"383","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:38.648924 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:38.648945 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:38.648953 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:38.648958 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:38.653613 2175536 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:07:38.653638 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:38.653646 2175536 round_trippers.go:580]     Audit-Id: c3710fc8-2f99-4c0a-b022-d30c8ed1273c
	I0916 11:07:38.653652 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:38.653655 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:38.653659 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:38.653662 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:38.653665 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:38 GMT
	I0916 11:07:38.654153 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:38.871395 2175536 command_runner.go:130] > serviceaccount/storage-provisioner created
	I0916 11:07:38.889747 2175536 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner created
	I0916 11:07:38.910198 2175536 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0916 11:07:38.930443 2175536 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0916 11:07:38.943209 2175536 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath created
	I0916 11:07:38.955414 2175536 command_runner.go:130] > pod/storage-provisioner created
	I0916 11:07:38.970871 2175536 out.go:177] * Enabled addons: default-storageclass, storage-provisioner
	I0916 11:07:38.972884 2175536 addons.go:510] duration metric: took 1.288901926s for enable addons: enabled=[default-storageclass storage-provisioner]
	I0916 11:07:39.085387 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0916 11:07:39.085409 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:39.085419 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:39.085423 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:39.088200 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:39.088222 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:39.088230 2175536 round_trippers.go:580]     Content-Length: 291
	I0916 11:07:39.088236 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:39 GMT
	I0916 11:07:39.088241 2175536 round_trippers.go:580]     Audit-Id: 19ffed9d-16d2-409c-bfb3-41237962b67a
	I0916 11:07:39.088245 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:39.088248 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:39.088251 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:39.088254 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:39.088501 2175536 request.go:1351] Response Body: {"kind":"Scale","apiVersion":"autoscaling/v1","metadata":{"name":"coredns","namespace":"kube-system","uid":"517b28ab-923f-4c66-9f57-81af25d1c992","resourceVersion":"405","creationTimestamp":"2024-09-16T11:07:32Z"},"spec":{"replicas":1},"status":{"replicas":1,"selector":"k8s-app=kube-dns"}}
	I0916 11:07:39.088603 2175536 kapi.go:214] "coredns" deployment in "kube-system" namespace and "multinode-890146" context rescaled to 1 replicas
	I0916 11:07:39.145943 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-bb4db
	I0916 11:07:39.146018 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:39.146032 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:39.146058 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:39.148854 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:39.148874 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:39.148883 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:39 GMT
	I0916 11:07:39.148888 2175536 round_trippers.go:580]     Audit-Id: b365c193-f3ab-4b28-81f4-06a5e9e7e5b6
	I0916 11:07:39.148892 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:39.148895 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:39.148898 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:39.148901 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:39.149560 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-bb4db","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"3fd53b00-28ef-44ef-8541-097ebc870b2f","resourceVersion":"397","creationTimestamp":"2024-09-16T11:07:37Z","deletionTimestamp":"2024-09-16T11:08:08Z","deletionGracePeriodSeconds":30,"labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:pod
AntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecutio [truncated 6438 chars]
	I0916 11:07:39.150216 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:39.150266 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:39.150291 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:39.150312 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:39.152832 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:39.152887 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:39.152919 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:39.152938 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:39.152952 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:39.152970 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:39.152997 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:39 GMT
	I0916 11:07:39.153014 2175536 round_trippers.go:580]     Audit-Id: 30488fb8-bd7b-41c2-9a34-cb4b4b9d465c
	I0916 11:07:39.153655 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:39.645732 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-bb4db
	I0916 11:07:39.645759 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:39.645768 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:39.645772 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:39.648445 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:39.648473 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:39.648482 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:39 GMT
	I0916 11:07:39.648489 2175536 round_trippers.go:580]     Audit-Id: 5be2ee27-86ee-4352-8a93-05c002488c36
	I0916 11:07:39.648492 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:39.648496 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:39.648498 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:39.648501 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:39.648663 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-bb4db","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"3fd53b00-28ef-44ef-8541-097ebc870b2f","resourceVersion":"397","creationTimestamp":"2024-09-16T11:07:37Z","deletionTimestamp":"2024-09-16T11:08:08Z","deletionGracePeriodSeconds":30,"labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:pod
AntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecutio [truncated 6438 chars]
	I0916 11:07:39.649244 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:39.649260 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:39.649269 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:39.649274 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:39.651575 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:39.651600 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:39.651608 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:39.651614 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:39.651619 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:39.651623 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:39 GMT
	I0916 11:07:39.651628 2175536 round_trippers.go:580]     Audit-Id: 9a3c6506-52db-42a2-a4af-1c1bbfaa92fe
	I0916 11:07:39.651632 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:39.652119 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:40.146336 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-bb4db
	I0916 11:07:40.146362 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:40.146374 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:40.146381 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:40.148782 2175536 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 11:07:40.148823 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:40.148832 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:40.148838 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:40.148842 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:40.148846 2175536 round_trippers.go:580]     Content-Length: 216
	I0916 11:07:40.148850 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:40 GMT
	I0916 11:07:40.148854 2175536 round_trippers.go:580]     Audit-Id: 5ab32d15-4dae-45a2-a551-9bf3ba96b47b
	I0916 11:07:40.148858 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:40.148887 2175536 request.go:1351] Response Body: {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"pods \"coredns-7c65d6cfc9-bb4db\" not found","reason":"NotFound","details":{"name":"coredns-7c65d6cfc9-bb4db","kind":"pods"},"code":404}
	I0916 11:07:40.149157 2175536 pod_ready.go:98] error getting pod "coredns-7c65d6cfc9-bb4db" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bb4db" not found
	I0916 11:07:40.149180 2175536 pod_ready.go:82] duration metric: took 1.503700038s for pod "coredns-7c65d6cfc9-bb4db" in "kube-system" namespace to be "Ready" ...
	E0916 11:07:40.149192 2175536 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "coredns-7c65d6cfc9-bb4db" in "kube-system" namespace (skipping!): pods "coredns-7c65d6cfc9-bb4db" not found
	I0916 11:07:40.149204 2175536 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:40.149276 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:40.149287 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:40.149295 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:40.149300 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:40.151765 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:40.151787 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:40.151796 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:40.151800 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:40.151804 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:40.151807 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:40.151810 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:40 GMT
	I0916 11:07:40.151813 2175536 round_trippers.go:580]     Audit-Id: 34ca6fc7-9d78-41c7-bac9-3f621a27542a
	I0916 11:07:40.152023 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:40.152601 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:40.152618 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:40.152627 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:40.152631 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:40.155026 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:40.155051 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:40.155060 2175536 round_trippers.go:580]     Audit-Id: 764d0383-512c-419b-8e3e-230374d932db
	I0916 11:07:40.155064 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:40.155067 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:40.155070 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:40.155073 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:40.155077 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:40 GMT
	I0916 11:07:40.155516 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:40.650209 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:40.650233 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:40.650243 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:40.650249 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:40.652601 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:40.652628 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:40.652636 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:40.652641 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:40.652644 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:40.652649 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:40 GMT
	I0916 11:07:40.652652 2175536 round_trippers.go:580]     Audit-Id: 1bd8ff28-3d0d-4fe7-8c36-446b92a7542e
	I0916 11:07:40.652654 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:40.652907 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:40.653460 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:40.653479 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:40.653488 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:40.653493 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:40.655599 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:40.655662 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:40.655686 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:40.655704 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:40.655736 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:40 GMT
	I0916 11:07:40.655758 2175536 round_trippers.go:580]     Audit-Id: 56b35a80-4ecd-4abd-835c-abd6596a50c1
	I0916 11:07:40.655775 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:40.655786 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:40.655938 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:41.149907 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:41.149937 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:41.149947 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:41.149952 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:41.152358 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:41.152392 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:41.152401 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:41.152405 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:41 GMT
	I0916 11:07:41.152409 2175536 round_trippers.go:580]     Audit-Id: 8b7a240b-1a4d-4031-a2a9-209c23947913
	I0916 11:07:41.152415 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:41.152419 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:41.152423 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:41.152679 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:41.153246 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:41.153262 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:41.153270 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:41.153275 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:41.155222 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:07:41.155272 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:41.155303 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:41 GMT
	I0916 11:07:41.155321 2175536 round_trippers.go:580]     Audit-Id: a57fca7e-ed29-4a1a-ba8f-d43730b173aa
	I0916 11:07:41.155353 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:41.155374 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:41.155385 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:41.155389 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:41.155552 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:41.650177 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:41.650208 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:41.650225 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:41.650229 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:41.652864 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:41.652961 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:41.652970 2175536 round_trippers.go:580]     Audit-Id: 9832851e-07a8-4c34-a483-bfa0e73c4f45
	I0916 11:07:41.652975 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:41.652978 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:41.652982 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:41.652987 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:41.652990 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:41 GMT
	I0916 11:07:41.653103 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:41.653667 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:41.653687 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:41.653696 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:41.653701 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:41.656006 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:41.656072 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:41.656097 2175536 round_trippers.go:580]     Audit-Id: f3d0e4f0-4c35-47c6-8331-feeb1ff72110
	I0916 11:07:41.656101 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:41.656104 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:41.656107 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:41.656110 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:41.656113 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:41 GMT
	I0916 11:07:41.656255 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:42.150375 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:42.150404 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:42.150415 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:42.150422 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:42.153554 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:07:42.153583 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:42.153593 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:42.153597 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:42.153600 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:42.153603 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:42.153606 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:42 GMT
	I0916 11:07:42.153609 2175536 round_trippers.go:580]     Audit-Id: 29651ac0-78f3-4159-bf3f-57eb5afa0075
	I0916 11:07:42.153951 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:42.154537 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:42.154555 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:42.154562 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:42.154567 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:42.157323 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:42.157350 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:42.157360 2175536 round_trippers.go:580]     Audit-Id: 7f87e003-ee2a-4503-bca5-eb000b894d54
	I0916 11:07:42.157365 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:42.157395 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:42.157400 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:42.157403 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:42.157408 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:42 GMT
	I0916 11:07:42.157911 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:42.158317 2175536 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:07:42.650097 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:42.650125 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:42.650135 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:42.650141 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:42.652718 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:42.652801 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:42.652823 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:42 GMT
	I0916 11:07:42.652842 2175536 round_trippers.go:580]     Audit-Id: e9319798-df4d-4305-bca0-735e18e44921
	I0916 11:07:42.652874 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:42.652898 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:42.652921 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:42.652933 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:42.653091 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:42.653726 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:42.653752 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:42.653762 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:42.653767 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:42.655841 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:42.655864 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:42.655873 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:42.655879 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:42 GMT
	I0916 11:07:42.655882 2175536 round_trippers.go:580]     Audit-Id: 9e2a34c4-fff5-4866-82e9-0231140dd2c6
	I0916 11:07:42.655885 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:42.655888 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:42.655899 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:42.656196 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:43.150410 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:43.150437 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:43.150447 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:43.150451 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:43.152996 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:43.153020 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:43.153030 2175536 round_trippers.go:580]     Audit-Id: 975eeb6c-ce5d-455d-a514-211c32f409b5
	I0916 11:07:43.153034 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:43.153037 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:43.153042 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:43.153045 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:43.153049 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:43 GMT
	I0916 11:07:43.153348 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:43.153928 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:43.153948 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:43.153957 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:43.153961 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:43.156294 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:43.156358 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:43.156381 2175536 round_trippers.go:580]     Audit-Id: 4841fd85-d136-42f7-8274-d51d7a4f33d6
	I0916 11:07:43.156400 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:43.156431 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:43.156452 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:43.156467 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:43.156484 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:43 GMT
	I0916 11:07:43.157073 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"368","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:43.650277 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:43.650300 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:43.650310 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:43.650315 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:43.652921 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:43.652946 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:43.652955 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:43.652971 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:43.652975 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:43 GMT
	I0916 11:07:43.652978 2175536 round_trippers.go:580]     Audit-Id: 3add61c7-2466-4b49-a2e5-ae5897022c2a
	I0916 11:07:43.652981 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:43.652984 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:43.653111 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:43.653715 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:43.653733 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:43.653752 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:43.653765 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:43.655796 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:43.655818 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:43.655826 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:43.655834 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:43.655848 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:43.655852 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:43 GMT
	I0916 11:07:43.655855 2175536 round_trippers.go:580]     Audit-Id: 6c2fd7c3-86e9-4032-be5f-d734803119e6
	I0916 11:07:43.655862 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:43.656279 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:44.149942 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:44.149969 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:44.149978 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:44.149983 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:44.152474 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:44.152533 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:44.152542 2175536 round_trippers.go:580]     Audit-Id: d98ef9be-7d86-41f6-a699-a7c21e36aa20
	I0916 11:07:44.152546 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:44.152549 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:44.152551 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:44.152556 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:44.152566 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:44 GMT
	I0916 11:07:44.152678 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:44.153211 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:44.153230 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:44.153239 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:44.153244 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:44.155272 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:44.155294 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:44.155309 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:44.155314 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:44.155317 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:44.155321 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:44 GMT
	I0916 11:07:44.155324 2175536 round_trippers.go:580]     Audit-Id: 6a6ff00f-94a0-4c8c-ba4e-9b6b0b861050
	I0916 11:07:44.155327 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:44.155504 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:44.649519 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:44.649543 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:44.649553 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:44.649560 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:44.651966 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:44.651990 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:44.651998 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:44.652002 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:44.652005 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:44.652010 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:44 GMT
	I0916 11:07:44.652014 2175536 round_trippers.go:580]     Audit-Id: 1cacca30-b09c-48fd-8b96-d756b6bbd471
	I0916 11:07:44.652021 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:44.652276 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:44.652836 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:44.652858 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:44.652867 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:44.652871 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:44.654970 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:44.654993 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:44.655002 2175536 round_trippers.go:580]     Audit-Id: 2fac3b73-4d07-46fa-839a-5b9e68aba17a
	I0916 11:07:44.655006 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:44.655010 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:44.655013 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:44.655017 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:44.655020 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:44 GMT
	I0916 11:07:44.655206 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:44.655582 2175536 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:07:45.150392 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:45.150428 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:45.150438 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:45.150444 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:45.153592 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:07:45.153622 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:45.153631 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:45 GMT
	I0916 11:07:45.153637 2175536 round_trippers.go:580]     Audit-Id: e0e7cbcb-711f-42a8-a345-553c54251c5c
	I0916 11:07:45.153642 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:45.153646 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:45.153649 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:45.153653 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:45.154385 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:45.155151 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:45.155186 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:45.155200 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:45.155206 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:45.157914 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:45.157937 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:45.157947 2175536 round_trippers.go:580]     Audit-Id: d8ad18d6-1d55-45ed-847e-e7017fe91285
	I0916 11:07:45.157959 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:45.157963 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:45.157967 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:45.157970 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:45.157974 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:45 GMT
	I0916 11:07:45.158586 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:45.649493 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:45.649516 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:45.649525 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:45.649531 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:45.652257 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:45.652352 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:45.652371 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:45 GMT
	I0916 11:07:45.652376 2175536 round_trippers.go:580]     Audit-Id: 28718db1-5ea1-434f-951f-e92c45a31d7f
	I0916 11:07:45.652380 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:45.652382 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:45.652385 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:45.652388 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:45.652594 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:45.653322 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:45.653341 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:45.653379 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:45.653388 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:45.656546 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:07:45.656576 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:45.656584 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:45.656589 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:45 GMT
	I0916 11:07:45.656592 2175536 round_trippers.go:580]     Audit-Id: 918bc634-55c7-47d8-bbed-18f85c66fbae
	I0916 11:07:45.656595 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:45.656604 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:45.656607 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:45.656716 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:46.149493 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:46.149517 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:46.149528 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:46.149532 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:46.151983 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:46.152010 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:46.152019 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:46.152023 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:46.152029 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:46 GMT
	I0916 11:07:46.152033 2175536 round_trippers.go:580]     Audit-Id: 01b124e8-64cf-49ca-a9f7-d841def34329
	I0916 11:07:46.152040 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:46.152043 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:46.152196 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:46.152764 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:46.152783 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:46.152792 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:46.152797 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:46.155185 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:46.155208 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:46.155218 2175536 round_trippers.go:580]     Audit-Id: d8cfd4cd-9192-4725-a28a-c60bd805bbc5
	I0916 11:07:46.155224 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:46.155228 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:46.155231 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:46.155234 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:46.155236 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:46 GMT
	I0916 11:07:46.155348 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:46.650197 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:46.650225 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:46.650236 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:46.650241 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:46.652588 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:46.652629 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:46.652638 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:46.652643 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:46 GMT
	I0916 11:07:46.652646 2175536 round_trippers.go:580]     Audit-Id: 4537c216-61e2-4212-9da2-a20f475f3ca7
	I0916 11:07:46.652648 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:46.652651 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:46.652654 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:46.652835 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:46.653401 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:46.653419 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:46.653428 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:46.653434 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:46.655538 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:46.655559 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:46.655567 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:46.655571 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:46.655574 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:46.655578 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:46.655581 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:46 GMT
	I0916 11:07:46.655585 2175536 round_trippers.go:580]     Audit-Id: 4353c81e-7c05-4b0c-8f39-6b093875fc66
	I0916 11:07:46.655682 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:46.656046 2175536 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:07:47.149456 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:47.149480 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:47.149490 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:47.149494 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:47.151933 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:47.151955 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:47.151964 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:47.151967 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:47.151970 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:47.151973 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:47.151975 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:47 GMT
	I0916 11:07:47.151978 2175536 round_trippers.go:580]     Audit-Id: 556f65df-bc59-4c2c-aaba-aea2d0117b1b
	I0916 11:07:47.152101 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:47.152632 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:47.152655 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:47.152664 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:47.152668 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:47.154721 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:47.154741 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:47.154748 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:47.154753 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:47.154756 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:47.154760 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:47.154764 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:47 GMT
	I0916 11:07:47.154767 2175536 round_trippers.go:580]     Audit-Id: ce240a75-c622-4304-b4c1-45745fe358af
	I0916 11:07:47.154849 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:47.649417 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:47.649444 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:47.649454 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:47.649459 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:47.652176 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:47.652200 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:47.652208 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:47.652212 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:47 GMT
	I0916 11:07:47.652215 2175536 round_trippers.go:580]     Audit-Id: fdb90aa5-51ed-4f8b-a18f-24770c9031d3
	I0916 11:07:47.652218 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:47.652221 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:47.652224 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:47.652540 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:47.653089 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:47.653107 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:47.653117 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:47.653121 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:47.655203 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:47.655221 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:47.655229 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:47.655238 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:47.655241 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:47.655244 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:47.655248 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:47 GMT
	I0916 11:07:47.655250 2175536 round_trippers.go:580]     Audit-Id: 318fafc3-8aec-4595-a5f6-488f717c1b24
	I0916 11:07:47.655556 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:48.150441 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:48.150482 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:48.150496 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:48.150504 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:48.153149 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:48.153174 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:48.153183 2175536 round_trippers.go:580]     Audit-Id: ffb82b1b-7700-4a59-bb9a-e4b74786db89
	I0916 11:07:48.153189 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:48.153193 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:48.153198 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:48.153201 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:48.153204 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:48 GMT
	I0916 11:07:48.153650 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:48.154208 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:48.154225 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:48.154234 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:48.154238 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:48.156610 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:48.156634 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:48.156644 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:48.156648 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:48.156651 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:48.156654 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:48.156657 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:48 GMT
	I0916 11:07:48.156662 2175536 round_trippers.go:580]     Audit-Id: 385ece48-691f-4228-b168-b1a96d093553
	I0916 11:07:48.156973 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:48.650156 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:48.650183 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:48.650193 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:48.650199 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:48.652559 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:48.652582 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:48.652590 2175536 round_trippers.go:580]     Audit-Id: ac00f509-246d-47e2-b97b-2af32ddecc5b
	I0916 11:07:48.652594 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:48.652598 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:48.652603 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:48.652607 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:48.652611 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:48 GMT
	I0916 11:07:48.652738 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:48.653267 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:48.653276 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:48.653286 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:48.653290 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:48.655187 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:07:48.655205 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:48.655213 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:48.655217 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:48 GMT
	I0916 11:07:48.655220 2175536 round_trippers.go:580]     Audit-Id: ea21718a-6d06-4b0d-82f4-fd50b1b7300b
	I0916 11:07:48.655223 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:48.655226 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:48.655228 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:48.655326 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:49.149390 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:49.149417 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:49.149427 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:49.149432 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:49.151841 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:49.151909 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:49.151946 2175536 round_trippers.go:580]     Audit-Id: 12353c7b-2061-4868-b2d9-9e0c9ffec00e
	I0916 11:07:49.151970 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:49.151989 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:49.151993 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:49.151996 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:49.151999 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:49 GMT
	I0916 11:07:49.152137 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:49.152686 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:49.152704 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:49.152713 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:49.152720 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:49.154872 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:49.154898 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:49.154907 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:49.154911 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:49.154915 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:49 GMT
	I0916 11:07:49.154919 2175536 round_trippers.go:580]     Audit-Id: b3433736-11e5-40ea-bc6a-6cd2d25b76d3
	I0916 11:07:49.154922 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:49.154925 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:49.155227 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:49.155597 2175536 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:07:49.649840 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:49.649865 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:49.649875 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:49.649879 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:49.652392 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:49.652417 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:49.652427 2175536 round_trippers.go:580]     Audit-Id: 426581de-530c-412d-aac0-21da6a2d9cb3
	I0916 11:07:49.652431 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:49.652434 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:49.652437 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:49.652440 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:49.652442 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:49 GMT
	I0916 11:07:49.652748 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:49.653293 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:49.653314 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:49.653323 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:49.653328 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:49.655446 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:49.655464 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:49.655474 2175536 round_trippers.go:580]     Audit-Id: 22279a98-0ff3-4d5c-88ff-04fb85625866
	I0916 11:07:49.655478 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:49.655483 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:49.655486 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:49.655489 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:49.655492 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:49 GMT
	I0916 11:07:49.655616 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:50.150410 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:50.150437 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:50.150449 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:50.150454 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:50.152861 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:50.152893 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:50.152902 2175536 round_trippers.go:580]     Audit-Id: 2f55421a-e0c5-4c55-8be9-5c3f4cadae6f
	I0916 11:07:50.152908 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:50.152912 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:50.152917 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:50.152921 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:50.152924 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:50 GMT
	I0916 11:07:50.153202 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:50.153782 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:50.153802 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:50.153812 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:50.153821 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:50.156141 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:50.156205 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:50.156221 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:50.156224 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:50.156229 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:50.156232 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:50.156235 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:50 GMT
	I0916 11:07:50.156238 2175536 round_trippers.go:580]     Audit-Id: 6ad7be8a-4e1d-4296-9ac3-b3a10e601ecd
	I0916 11:07:50.156650 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:50.650195 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:50.650227 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:50.650237 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:50.650241 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:50.652612 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:50.652638 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:50.652647 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:50.652651 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:50.652655 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:50.652660 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:50 GMT
	I0916 11:07:50.652663 2175536 round_trippers.go:580]     Audit-Id: d1588a98-5762-41d2-8fbf-f805d2251443
	I0916 11:07:50.652667 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:50.652980 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:50.653556 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:50.653575 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:50.653583 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:50.653588 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:50.655606 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:50.655661 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:50.655683 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:50.655696 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:50.655700 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:50 GMT
	I0916 11:07:50.655703 2175536 round_trippers.go:580]     Audit-Id: 5d184465-404c-4d70-875d-7374b0d015bd
	I0916 11:07:50.655706 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:50.655709 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:50.655848 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:51.150021 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:51.150048 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:51.150058 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:51.150062 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:51.152456 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:51.152482 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:51.152490 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:51 GMT
	I0916 11:07:51.152495 2175536 round_trippers.go:580]     Audit-Id: 97ab8e37-04b4-486e-9d0d-ba4eaee01b7b
	I0916 11:07:51.152499 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:51.152502 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:51.152505 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:51.152508 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:51.152621 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:51.153155 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:51.153165 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:51.153174 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:51.153179 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:51.155315 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:51.155410 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:51.155435 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:51 GMT
	I0916 11:07:51.155453 2175536 round_trippers.go:580]     Audit-Id: f7e627f0-47ce-48f3-8ecf-3c160e7f21a7
	I0916 11:07:51.155488 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:51.155506 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:51.155520 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:51.155535 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:51.155658 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:51.156056 2175536 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:07:51.650211 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:51.650235 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:51.650245 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:51.650251 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:51.652811 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:51.652838 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:51.652855 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:51.652861 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:51.652864 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:51.652867 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:51 GMT
	I0916 11:07:51.652870 2175536 round_trippers.go:580]     Audit-Id: 4f233f57-0150-4db0-8fcd-d43b8f3ab30e
	I0916 11:07:51.652872 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:51.653051 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:51.653695 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:51.653717 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:51.653726 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:51.653731 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:51.656052 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:51.656076 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:51.656084 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:51.656089 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:51 GMT
	I0916 11:07:51.656093 2175536 round_trippers.go:580]     Audit-Id: 496624f5-ef35-4df4-b702-5b48670bc1e6
	I0916 11:07:51.656096 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:51.656100 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:51.656103 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:51.656360 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:52.150059 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:52.150082 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:52.150092 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:52.150096 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:52.152544 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:52.152614 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:52.152637 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:52.152656 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:52.152670 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:52.152705 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:52.152720 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:52 GMT
	I0916 11:07:52.152737 2175536 round_trippers.go:580]     Audit-Id: 4567b755-eec3-4bc9-b5b1-fc9df1641b4c
	I0916 11:07:52.152907 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:52.153488 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:52.153504 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:52.153513 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:52.153538 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:52.155461 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:07:52.155499 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:52.155508 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:52.155512 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:52.155516 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:52.155519 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:52.155523 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:52 GMT
	I0916 11:07:52.155526 2175536 round_trippers.go:580]     Audit-Id: 0112f3dc-0499-48d5-acbf-e85824b0baa7
	I0916 11:07:52.155858 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:52.649461 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:52.649488 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:52.649498 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:52.649503 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:52.651861 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:52.651883 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:52.651892 2175536 round_trippers.go:580]     Audit-Id: 2c5e38e7-dcd0-447c-9199-acb54c5b7c54
	I0916 11:07:52.651897 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:52.651900 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:52.651905 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:52.651908 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:52.651910 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:52 GMT
	I0916 11:07:52.652224 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:52.652765 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:52.652783 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:52.652792 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:52.652796 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:52.654897 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:52.654916 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:52.654924 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:52.654928 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:52.654932 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:52.654935 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:52 GMT
	I0916 11:07:52.654939 2175536 round_trippers.go:580]     Audit-Id: 698426f6-3eb5-450d-b537-97fc1c1ed382
	I0916 11:07:52.654942 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:52.655432 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:53.149704 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:53.149726 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:53.149736 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:53.149740 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:53.152185 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:53.152209 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:53.152218 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:53 GMT
	I0916 11:07:53.152223 2175536 round_trippers.go:580]     Audit-Id: 8073137f-825d-4d82-9556-fce6c21a17de
	I0916 11:07:53.152228 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:53.152232 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:53.152234 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:53.152237 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:53.152560 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:53.153112 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:53.153131 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:53.153139 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:53.153145 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:53.155230 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:53.155251 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:53.155259 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:53.155263 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:53 GMT
	I0916 11:07:53.155268 2175536 round_trippers.go:580]     Audit-Id: 11f206f8-062c-405b-a400-76d3a3093c8a
	I0916 11:07:53.155271 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:53.155275 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:53.155281 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:53.155735 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:53.156105 2175536 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:07:53.649819 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:53.649851 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:53.649862 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:53.649866 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:53.652347 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:53.652372 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:53.652381 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:53.652385 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:53.652389 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:53.652392 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:53.652395 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:53 GMT
	I0916 11:07:53.652398 2175536 round_trippers.go:580]     Audit-Id: 89db465a-9a82-4dd1-a225-cae09326689e
	I0916 11:07:53.652750 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"386","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6363 chars]
	I0916 11:07:53.653289 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:53.653309 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:53.653318 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:53.653323 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:53.657048 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:07:53.657070 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:53.657079 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:53 GMT
	I0916 11:07:53.657083 2175536 round_trippers.go:580]     Audit-Id: 6a0236e7-4987-453b-8cbd-92fcb09a53b1
	I0916 11:07:53.657086 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:53.657089 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:53.657092 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:53.657100 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:53.657469 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.150185 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:07:54.150218 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.150231 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.150238 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.152704 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.152725 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.152734 2175536 round_trippers.go:580]     Audit-Id: cbb9f7ff-944f-4751-85c3-e51dfcfaf20c
	I0916 11:07:54.152738 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.152741 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.152744 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.152747 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.152750 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.152888 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"456","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6480 chars]
	I0916 11:07:54.153440 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.153451 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.153459 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.153464 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.155559 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.155624 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.155634 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.155640 2175536 round_trippers.go:580]     Audit-Id: 8fbfb155-7139-4376-ae3f-f8b520c396fa
	I0916 11:07:54.155643 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.155646 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.155650 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.155654 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.155799 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.156182 2175536 pod_ready.go:93] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"True"
	I0916 11:07:54.156203 2175536 pod_ready.go:82] duration metric: took 14.006983415s for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.156216 2175536 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.156277 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-890146
	I0916 11:07:54.156286 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.156294 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.156298 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.158347 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.158368 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.158376 2175536 round_trippers.go:580]     Audit-Id: ef48c66a-e4aa-43a9-924a-2cc9cb3f8dec
	I0916 11:07:54.158379 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.158385 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.158388 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.158391 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.158394 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.158662 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-890146","namespace":"kube-system","uid":"59c960ab-f6dd-4ed3-856c-ba2a02295b12","resourceVersion":"327","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.58.2:2379","kubernetes.io/config.hash":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.mirror":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.seen":"2024-09-16T11:07:32.783608135Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-cl
ient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6440 chars]
	I0916 11:07:54.159182 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.159202 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.159211 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.159214 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.161577 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.161644 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.161666 2175536 round_trippers.go:580]     Audit-Id: 77bd7f3b-d8fb-49df-9145-f93ccbe0398c
	I0916 11:07:54.161686 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.161717 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.161746 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.161754 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.161758 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.161865 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.162334 2175536 pod_ready.go:93] pod "etcd-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:07:54.162354 2175536 pod_ready.go:82] duration metric: took 6.130488ms for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.162368 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.162447 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-890146
	I0916 11:07:54.162457 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.162467 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.162471 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.164797 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.164823 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.164832 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.164836 2175536 round_trippers.go:580]     Audit-Id: 45927122-940d-4178-940b-cef2ca51fe82
	I0916 11:07:54.164840 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.164845 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.164848 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.164851 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.165221 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-890146","namespace":"kube-system","uid":"9846229d-7227-453f-8c30-697aaba61648","resourceVersion":"432","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.58.2:8443","kubernetes.io/config.hash":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.mirror":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.seen":"2024-09-16T11:07:32.783610957Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kube
rnetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 8518 chars]
	I0916 11:07:54.165794 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.165812 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.165822 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.165829 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.167920 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.167944 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.167952 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.167956 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.167959 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.167962 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.167989 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.167998 2175536 round_trippers.go:580]     Audit-Id: 2f394629-cd1e-44f4-8b79-7f8dc8770370
	I0916 11:07:54.168099 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.168506 2175536 pod_ready.go:93] pod "kube-apiserver-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:07:54.168527 2175536 pod_ready.go:82] duration metric: took 6.14903ms for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.168539 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.168611 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-890146
	I0916 11:07:54.168621 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.168630 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.168634 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.170621 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:07:54.170640 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.170649 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.170655 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.170659 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.170663 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.170666 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.170669 2175536 round_trippers.go:580]     Audit-Id: d4ed699a-ea5d-4917-bcf1-e2df94795316
	I0916 11:07:54.170835 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-890146","namespace":"kube-system","uid":"421b15a5-a8e2-4631-bf18-1592c8747b09","resourceVersion":"436","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.mirror":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.seen":"2024-09-16T11:07:32.783594137Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8093 chars]
	I0916 11:07:54.171380 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.171390 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.171399 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.171403 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.173326 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:07:54.173382 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.173403 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.173423 2175536 round_trippers.go:580]     Audit-Id: 24f1a1f4-41fa-430f-9230-5c5410a1ec9e
	I0916 11:07:54.173459 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.173480 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.173496 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.173510 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.173647 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.174067 2175536 pod_ready.go:93] pod "kube-controller-manager-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:07:54.174087 2175536 pod_ready.go:82] duration metric: took 5.537457ms for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.174098 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.174178 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:07:54.174189 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.174199 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.174205 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.176508 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.176572 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.176596 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.176616 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.176649 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.176682 2175536 round_trippers.go:580]     Audit-Id: 6b5f980f-a01d-4fe8-a85a-7adf75f72f4c
	I0916 11:07:54.176691 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.176694 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.176823 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-fm5qr","generateName":"kube-proxy-","namespace":"kube-system","uid":"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73","resourceVersion":"412","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6175 chars]
	I0916 11:07:54.177370 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.177391 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.177402 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.177406 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.179591 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.179660 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.179683 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.179703 2175536 round_trippers.go:580]     Audit-Id: d3e95d93-af27-478e-81dc-a8c630bcfcb7
	I0916 11:07:54.179734 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.179759 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.179777 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.179795 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.180432 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.180849 2175536 pod_ready.go:93] pod "kube-proxy-fm5qr" in "kube-system" namespace has status "Ready":"True"
	I0916 11:07:54.180869 2175536 pod_ready.go:82] duration metric: took 6.764043ms for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.180897 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.351235 2175536 request.go:632] Waited for 170.265368ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:07:54.351323 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:07:54.351336 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.351345 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.351352 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.353696 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.353719 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.353727 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.353733 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.353737 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.353741 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.353745 2175536 round_trippers.go:580]     Audit-Id: f39103d4-6912-44e3-b8e3-9457f873d483
	I0916 11:07:54.353748 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.353996 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"438","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 4975 chars]
	I0916 11:07:54.550788 2175536 request.go:632] Waited for 196.346694ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.550915 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:07:54.550932 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.550942 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.550947 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.553266 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.553290 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.553299 2175536 round_trippers.go:580]     Audit-Id: cae5e5e1-ff1a-4e25-9a40-48f48898c14e
	I0916 11:07:54.553304 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.553307 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.553333 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.553343 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.553348 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.554047 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:07:54.554445 2175536 pod_ready.go:93] pod "kube-scheduler-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:07:54.554464 2175536 pod_ready.go:82] duration metric: took 373.554532ms for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:07:54.554473 2175536 pod_ready.go:39] duration metric: took 15.938452327s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:07:54.554491 2175536 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:07:54.554567 2175536 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:07:54.566420 2175536 command_runner.go:130] > 1444
	I0916 11:07:54.566454 2175536 api_server.go:72] duration metric: took 16.88282404s to wait for apiserver process to appear ...
	I0916 11:07:54.566465 2175536 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:07:54.566484 2175536 api_server.go:253] Checking apiserver healthz at https://192.168.58.2:8443/healthz ...
	I0916 11:07:54.574483 2175536 api_server.go:279] https://192.168.58.2:8443/healthz returned 200:
	ok
	I0916 11:07:54.574557 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/version
	I0916 11:07:54.574563 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.574572 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.574582 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.575633 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:07:54.575652 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.575660 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.575664 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.575666 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.575670 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.575673 2175536 round_trippers.go:580]     Content-Length: 263
	I0916 11:07:54.575676 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.575679 2175536 round_trippers.go:580]     Audit-Id: eced1287-bfee-4134-87fc-867965913dd1
	I0916 11:07:54.575694 2175536 request.go:1351] Response Body: {
	  "major": "1",
	  "minor": "31",
	  "gitVersion": "v1.31.1",
	  "gitCommit": "948afe5ca072329a73c8e79ed5938717a5cb3d21",
	  "gitTreeState": "clean",
	  "buildDate": "2024-09-11T21:22:08Z",
	  "goVersion": "go1.22.6",
	  "compiler": "gc",
	  "platform": "linux/arm64"
	}
	I0916 11:07:54.575787 2175536 api_server.go:141] control plane version: v1.31.1
	I0916 11:07:54.575806 2175536 api_server.go:131] duration metric: took 9.334729ms to wait for apiserver health ...
	I0916 11:07:54.575814 2175536 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:07:54.750271 2175536 request.go:632] Waited for 174.388924ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:07:54.750332 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:07:54.750350 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.750379 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.750384 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.753725 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:07:54.753751 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.753760 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.753764 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.753769 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.753773 2175536 round_trippers.go:580]     Audit-Id: 871bb79a-785f-4f33-b7d4-3763b7046d46
	I0916 11:07:54.753777 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.753780 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.755048 2175536 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"460"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"456","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 58808 chars]
	I0916 11:07:54.758249 2175536 system_pods.go:59] 8 kube-system pods found
	I0916 11:07:54.758283 2175536 system_pods.go:61] "coredns-7c65d6cfc9-vp22b" [a6adb735-448b-480b-aba1-3ce4d56c6fc7] Running
	I0916 11:07:54.758290 2175536 system_pods.go:61] "etcd-multinode-890146" [59c960ab-f6dd-4ed3-856c-ba2a02295b12] Running
	I0916 11:07:54.758296 2175536 system_pods.go:61] "kindnet-dbrhk" [24ef6be7-a1ab-41f7-83c8-aa5af5007281] Running
	I0916 11:07:54.758301 2175536 system_pods.go:61] "kube-apiserver-multinode-890146" [9846229d-7227-453f-8c30-697aaba61648] Running
	I0916 11:07:54.758322 2175536 system_pods.go:61] "kube-controller-manager-multinode-890146" [421b15a5-a8e2-4631-bf18-1592c8747b09] Running
	I0916 11:07:54.758327 2175536 system_pods.go:61] "kube-proxy-fm5qr" [8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73] Running
	I0916 11:07:54.758332 2175536 system_pods.go:61] "kube-scheduler-multinode-890146" [6d440ef3-2e6e-4db9-8c28-2417f7a80c9f] Running
	I0916 11:07:54.758343 2175536 system_pods.go:61] "storage-provisioner" [97795413-5c7a-480b-9cbd-18d4dea5669b] Running
	I0916 11:07:54.758349 2175536 system_pods.go:74] duration metric: took 182.527097ms to wait for pod list to return data ...
	I0916 11:07:54.758362 2175536 default_sa.go:34] waiting for default service account to be created ...
	I0916 11:07:54.950271 2175536 request.go:632] Waited for 191.819086ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:07:54.950341 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:07:54.950351 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:54.950360 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:54.950367 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:54.953007 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:54.953036 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:54.953045 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:54.953049 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:54.953053 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:54.953058 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:54.953061 2175536 round_trippers.go:580]     Content-Length: 261
	I0916 11:07:54.953064 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:54 GMT
	I0916 11:07:54.953067 2175536 round_trippers.go:580]     Audit-Id: 4c463ba2-1ab4-4e04-94c3-b9d58a2613c4
	I0916 11:07:54.953088 2175536 request.go:1351] Response Body: {"kind":"ServiceAccountList","apiVersion":"v1","metadata":{"resourceVersion":"460"},"items":[{"metadata":{"name":"default","namespace":"default","uid":"05c19a8a-7c83-4ce8-b18d-3bc9431ca644","resourceVersion":"353","creationTimestamp":"2024-09-16T11:07:37Z"}}]}
	I0916 11:07:54.953273 2175536 default_sa.go:45] found service account: "default"
	I0916 11:07:54.953296 2175536 default_sa.go:55] duration metric: took 194.927755ms for default service account to be created ...
	I0916 11:07:54.953308 2175536 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 11:07:55.150590 2175536 request.go:632] Waited for 197.212256ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:07:55.150669 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:07:55.150715 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:55.150726 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:55.150735 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:55.153817 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:07:55.153913 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:55.153931 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:55.153937 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:55.153942 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:55.153946 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:55 GMT
	I0916 11:07:55.153951 2175536 round_trippers.go:580]     Audit-Id: 746e87a7-e88f-44c6-b990-79d69fb2aba3
	I0916 11:07:55.153955 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:55.154362 2175536 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"460"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"456","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 58808 chars]
	I0916 11:07:55.156980 2175536 system_pods.go:86] 8 kube-system pods found
	I0916 11:07:55.157012 2175536 system_pods.go:89] "coredns-7c65d6cfc9-vp22b" [a6adb735-448b-480b-aba1-3ce4d56c6fc7] Running
	I0916 11:07:55.157020 2175536 system_pods.go:89] "etcd-multinode-890146" [59c960ab-f6dd-4ed3-856c-ba2a02295b12] Running
	I0916 11:07:55.157026 2175536 system_pods.go:89] "kindnet-dbrhk" [24ef6be7-a1ab-41f7-83c8-aa5af5007281] Running
	I0916 11:07:55.157032 2175536 system_pods.go:89] "kube-apiserver-multinode-890146" [9846229d-7227-453f-8c30-697aaba61648] Running
	I0916 11:07:55.157038 2175536 system_pods.go:89] "kube-controller-manager-multinode-890146" [421b15a5-a8e2-4631-bf18-1592c8747b09] Running
	I0916 11:07:55.157043 2175536 system_pods.go:89] "kube-proxy-fm5qr" [8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73] Running
	I0916 11:07:55.157047 2175536 system_pods.go:89] "kube-scheduler-multinode-890146" [6d440ef3-2e6e-4db9-8c28-2417f7a80c9f] Running
	I0916 11:07:55.157051 2175536 system_pods.go:89] "storage-provisioner" [97795413-5c7a-480b-9cbd-18d4dea5669b] Running
	I0916 11:07:55.157098 2175536 system_pods.go:126] duration metric: took 203.779648ms to wait for k8s-apps to be running ...
	I0916 11:07:55.157115 2175536 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:07:55.157190 2175536 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:07:55.170989 2175536 system_svc.go:56] duration metric: took 13.865389ms WaitForService to wait for kubelet
	I0916 11:07:55.171018 2175536 kubeadm.go:582] duration metric: took 17.487386673s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:07:55.171038 2175536 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:07:55.350348 2175536 request.go:632] Waited for 179.235487ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes
	I0916 11:07:55.350407 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes
	I0916 11:07:55.350413 2175536 round_trippers.go:469] Request Headers:
	I0916 11:07:55.350422 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:07:55.350428 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:07:55.353203 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:07:55.353224 2175536 round_trippers.go:577] Response Headers:
	I0916 11:07:55.353232 2175536 round_trippers.go:580]     Audit-Id: 6f008049-ddcf-4dcc-9e47-1eece76d2011
	I0916 11:07:55.353236 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:07:55.353240 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:07:55.353244 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:07:55.353262 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:07:55.353266 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:07:55 GMT
	I0916 11:07:55.353396 2175536 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"460"},"items":[{"metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"manag
edFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1", [truncated 5156 chars]
	I0916 11:07:55.353893 2175536 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:07:55.353919 2175536 node_conditions.go:123] node cpu capacity is 2
	I0916 11:07:55.353930 2175536 node_conditions.go:105] duration metric: took 182.886291ms to run NodePressure ...
	I0916 11:07:55.353943 2175536 start.go:241] waiting for startup goroutines ...
	I0916 11:07:55.353950 2175536 start.go:246] waiting for cluster config update ...
	I0916 11:07:55.353965 2175536 start.go:255] writing updated cluster config ...
	I0916 11:07:55.356664 2175536 out.go:201] 
	I0916 11:07:55.358753 2175536 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:07:55.358847 2175536 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:07:55.361296 2175536 out.go:177] * Starting "multinode-890146-m02" worker node in "multinode-890146" cluster
	I0916 11:07:55.363441 2175536 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 11:07:55.365739 2175536 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:07:55.368296 2175536 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:07:55.368335 2175536 cache.go:56] Caching tarball of preloaded images
	I0916 11:07:55.368385 2175536 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:07:55.368448 2175536 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 11:07:55.368465 2175536 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 11:07:55.368555 2175536 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	W0916 11:07:55.387660 2175536 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:07:55.387681 2175536 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:07:55.387779 2175536 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:07:55.387802 2175536 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:07:55.387807 2175536 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:07:55.387815 2175536 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:07:55.387820 2175536 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:07:55.388995 2175536 image.go:273] response: 
	I0916 11:07:55.502327 2175536 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:07:55.502368 2175536 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:07:55.502398 2175536 start.go:360] acquireMachinesLock for multinode-890146-m02: {Name:mkb193e5e8454b4e97e0a3d9e40e1ee2de147629 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:07:55.502517 2175536 start.go:364] duration metric: took 98.018µs to acquireMachinesLock for "multinode-890146-m02"
	I0916 11:07:55.502548 2175536 start.go:93] Provisioning new machine with config: &{Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount
9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}
	I0916 11:07:55.502637 2175536 start.go:125] createHost starting for "m02" (driver="docker")
	I0916 11:07:55.506447 2175536 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 11:07:55.506610 2175536 start.go:159] libmachine.API.Create for "multinode-890146" (driver="docker")
	I0916 11:07:55.506651 2175536 client.go:168] LocalClient.Create starting
	I0916 11:07:55.506780 2175536 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 11:07:55.506830 2175536 main.go:141] libmachine: Decoding PEM data...
	I0916 11:07:55.506849 2175536 main.go:141] libmachine: Parsing certificate...
	I0916 11:07:55.506914 2175536 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 11:07:55.506936 2175536 main.go:141] libmachine: Decoding PEM data...
	I0916 11:07:55.506954 2175536 main.go:141] libmachine: Parsing certificate...
	I0916 11:07:55.507280 2175536 cli_runner.go:164] Run: docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:07:55.521699 2175536 network_create.go:77] Found existing network {name:multinode-890146 subnet:0x40012521e0 gateway:[0 0 0 0 0 0 0 0 0 0 255 255 192 168 58 1] mtu:1500}
	I0916 11:07:55.521747 2175536 kic.go:121] calculated static IP "192.168.58.3" for the "multinode-890146-m02" container
	I0916 11:07:55.521818 2175536 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:07:55.537755 2175536 cli_runner.go:164] Run: docker volume create multinode-890146-m02 --label name.minikube.sigs.k8s.io=multinode-890146-m02 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:07:55.554187 2175536 oci.go:103] Successfully created a docker volume multinode-890146-m02
	I0916 11:07:55.554273 2175536 cli_runner.go:164] Run: docker run --rm --name multinode-890146-m02-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-890146-m02 --entrypoint /usr/bin/test -v multinode-890146-m02:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 11:07:56.125492 2175536 oci.go:107] Successfully prepared a docker volume multinode-890146-m02
	I0916 11:07:56.125538 2175536 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:07:56.125558 2175536 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 11:07:56.125628 2175536 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v multinode-890146-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 11:08:00.302138 2175536 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v multinode-890146-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir: (4.176450501s)
	I0916 11:08:00.302172 2175536 kic.go:203] duration metric: took 4.176608489s to extract preloaded images to volume ...
	W0916 11:08:00.302330 2175536 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 11:08:00.302447 2175536 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 11:08:00.444886 2175536 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname multinode-890146-m02 --name multinode-890146-m02 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-890146-m02 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=multinode-890146-m02 --network multinode-890146 --ip 192.168.58.3 --volume multinode-890146-m02:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 11:08:00.843116 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Running}}
	I0916 11:08:00.867003 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Status}}
	I0916 11:08:00.894286 2175536 cli_runner.go:164] Run: docker exec multinode-890146-m02 stat /var/lib/dpkg/alternatives/iptables
	I0916 11:08:00.951628 2175536 oci.go:144] the created container "multinode-890146-m02" has a running status.
	I0916 11:08:00.951665 2175536 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa...
	I0916 11:08:01.178266 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0916 11:08:01.178358 2175536 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 11:08:01.208797 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Status}}
	I0916 11:08:01.235936 2175536 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 11:08:01.235957 2175536 kic_runner.go:114] Args: [docker exec --privileged multinode-890146-m02 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 11:08:01.324883 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Status}}
	I0916 11:08:01.350434 2175536 machine.go:93] provisionDockerMachine start ...
	I0916 11:08:01.350531 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:01.384040 2175536 main.go:141] libmachine: Using SSH client type: native
	I0916 11:08:01.384327 2175536 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40722 <nil> <nil>}
	I0916 11:08:01.384336 2175536 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:08:01.385217 2175536 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: read tcp 127.0.0.1:56210->127.0.0.1:40722: read: connection reset by peer
	I0916 11:08:04.522325 2175536 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146-m02
	
	I0916 11:08:04.522351 2175536 ubuntu.go:169] provisioning hostname "multinode-890146-m02"
	I0916 11:08:04.522426 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:04.539702 2175536 main.go:141] libmachine: Using SSH client type: native
	I0916 11:08:04.539946 2175536 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40722 <nil> <nil>}
	I0916 11:08:04.539965 2175536 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-890146-m02 && echo "multinode-890146-m02" | sudo tee /etc/hostname
	I0916 11:08:04.691976 2175536 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146-m02
	
	I0916 11:08:04.692065 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:04.713951 2175536 main.go:141] libmachine: Using SSH client type: native
	I0916 11:08:04.714195 2175536 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40722 <nil> <nil>}
	I0916 11:08:04.714213 2175536 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-890146-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-890146-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-890146-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:08:04.850953 2175536 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:08:04.850990 2175536 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 11:08:04.851006 2175536 ubuntu.go:177] setting up certificates
	I0916 11:08:04.851017 2175536 provision.go:84] configureAuth start
	I0916 11:08:04.851079 2175536 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m02
	I0916 11:08:04.869185 2175536 provision.go:143] copyHostCerts
	I0916 11:08:04.869232 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:08:04.869267 2175536 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 11:08:04.869279 2175536 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:08:04.869360 2175536 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 11:08:04.869446 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:08:04.869468 2175536 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 11:08:04.869473 2175536 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:08:04.869500 2175536 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 11:08:04.869544 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:08:04.869566 2175536 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 11:08:04.869575 2175536 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:08:04.869602 2175536 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 11:08:04.869656 2175536 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.multinode-890146-m02 san=[127.0.0.1 192.168.58.3 localhost minikube multinode-890146-m02]
	I0916 11:08:05.328262 2175536 provision.go:177] copyRemoteCerts
	I0916 11:08:05.328334 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:08:05.328379 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:05.345782 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40722 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:08:05.448131 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:08:05.448198 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1229 bytes)
	I0916 11:08:05.473486 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:08:05.473555 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 11:08:05.499338 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:08:05.499407 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 11:08:05.525060 2175536 provision.go:87] duration metric: took 674.028955ms to configureAuth
	I0916 11:08:05.525086 2175536 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:08:05.525287 2175536 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:08:05.525301 2175536 machine.go:96] duration metric: took 4.174844267s to provisionDockerMachine
	I0916 11:08:05.525311 2175536 client.go:171] duration metric: took 10.018650112s to LocalClient.Create
	I0916 11:08:05.525333 2175536 start.go:167] duration metric: took 10.018724344s to libmachine.API.Create "multinode-890146"
	I0916 11:08:05.525345 2175536 start.go:293] postStartSetup for "multinode-890146-m02" (driver="docker")
	I0916 11:08:05.525355 2175536 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:08:05.525436 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:08:05.525481 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:05.542154 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40722 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:08:05.647159 2175536 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:08:05.651393 2175536 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:08:05.651414 2175536 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:08:05.651429 2175536 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:08:05.651434 2175536 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:08:05.651439 2175536 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:08:05.651443 2175536 command_runner.go:130] > ID=ubuntu
	I0916 11:08:05.651447 2175536 command_runner.go:130] > ID_LIKE=debian
	I0916 11:08:05.651452 2175536 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:08:05.651460 2175536 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:08:05.651472 2175536 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:08:05.651480 2175536 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:08:05.651487 2175536 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:08:05.651561 2175536 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:08:05.651593 2175536 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:08:05.651607 2175536 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:08:05.651620 2175536 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:08:05.651636 2175536 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 11:08:05.651707 2175536 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 11:08:05.651805 2175536 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 11:08:05.651813 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 11:08:05.651923 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:08:05.661612 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:08:05.687361 2175536 start.go:296] duration metric: took 162.000013ms for postStartSetup
	I0916 11:08:05.687789 2175536 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m02
	I0916 11:08:05.704259 2175536 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:08:05.704570 2175536 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:08:05.704624 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:05.720743 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40722 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:08:05.815588 2175536 command_runner.go:130] > 21%
	I0916 11:08:05.815671 2175536 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:08:05.819885 2175536 command_runner.go:130] > 154G
	I0916 11:08:05.820327 2175536 start.go:128] duration metric: took 10.317676147s to createHost
	I0916 11:08:05.820349 2175536 start.go:83] releasing machines lock for "multinode-890146-m02", held for 10.31781844s
	I0916 11:08:05.820424 2175536 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m02
	I0916 11:08:05.840029 2175536 out.go:177] * Found network options:
	I0916 11:08:05.842151 2175536 out.go:177]   - NO_PROXY=192.168.58.2
	W0916 11:08:05.844425 2175536 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:08:05.844485 2175536 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 11:08:05.844565 2175536 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:08:05.844613 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:05.844891 2175536 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:08:05.844951 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:08:05.865699 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40722 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:08:05.867089 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40722 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:08:05.959825 2175536 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 11:08:05.959899 2175536 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:08:05.959928 2175536 command_runner.go:130] > Device: f4h/244d	Inode: 1301117     Links: 1
	I0916 11:08:05.959946 2175536 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:08:05.959975 2175536 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:08:05.960021 2175536 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0916 11:08:05.960051 2175536 command_runner.go:130] > Change: 2024-09-16 10:29:56.970000846 +0000
	I0916 11:08:05.960070 2175536 command_runner.go:130] >  Birth: 2024-09-16 10:29:56.970000846 +0000
	I0916 11:08:05.960447 2175536 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 11:08:06.090370 2175536 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:08:06.093948 2175536 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:08:06.094092 2175536 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:08:06.125402 2175536 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf, 
	I0916 11:08:06.125428 2175536 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 11:08:06.125436 2175536 start.go:495] detecting cgroup driver to use...
	I0916 11:08:06.125470 2175536 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:08:06.125523 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 11:08:06.138592 2175536 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 11:08:06.151214 2175536 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:08:06.151287 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:08:06.165633 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:08:06.181163 2175536 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:08:06.278134 2175536 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:08:06.383302 2175536 command_runner.go:130] ! Created symlink /etc/systemd/system/cri-docker.service → /dev/null.
	I0916 11:08:06.383384 2175536 docker.go:233] disabling docker service ...
	I0916 11:08:06.383460 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:08:06.406049 2175536 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:08:06.417868 2175536 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:08:06.511389 2175536 command_runner.go:130] ! Removed /etc/systemd/system/sockets.target.wants/docker.socket.
	I0916 11:08:06.511469 2175536 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:08:06.608939 2175536 command_runner.go:130] ! Created symlink /etc/systemd/system/docker.service → /dev/null.
	I0916 11:08:06.609020 2175536 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:08:06.621543 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:08:06.640183 2175536 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0916 11:08:06.644666 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 11:08:06.656578 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 11:08:06.668824 2175536 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 11:08:06.668921 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 11:08:06.681467 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:08:06.695397 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 11:08:06.708191 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:08:06.719433 2175536 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:08:06.729479 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 11:08:06.740279 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 11:08:06.750664 2175536 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 11:08:06.763254 2175536 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:08:06.771014 2175536 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:08:06.772406 2175536 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:08:06.781503 2175536 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:08:06.874948 2175536 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 11:08:07.020548 2175536 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 11:08:07.020707 2175536 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 11:08:07.024371 2175536 command_runner.go:130] >   File: /run/containerd/containerd.sock
	I0916 11:08:07.024440 2175536 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:08:07.024460 2175536 command_runner.go:130] > Device: fdh/253d	Inode: 175         Links: 1
	I0916 11:08:07.024481 2175536 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:08:07.024512 2175536 command_runner.go:130] > Access: 2024-09-16 11:08:06.964092055 +0000
	I0916 11:08:07.024547 2175536 command_runner.go:130] > Modify: 2024-09-16 11:08:06.964092055 +0000
	I0916 11:08:07.024575 2175536 command_runner.go:130] > Change: 2024-09-16 11:08:06.964092055 +0000
	I0916 11:08:07.024593 2175536 command_runner.go:130] >  Birth: -
	I0916 11:08:07.024862 2175536 start.go:563] Will wait 60s for crictl version
	I0916 11:08:07.024953 2175536 ssh_runner.go:195] Run: which crictl
	I0916 11:08:07.028218 2175536 command_runner.go:130] > /usr/bin/crictl
	I0916 11:08:07.028574 2175536 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:08:07.071564 2175536 command_runner.go:130] > Version:  0.1.0
	I0916 11:08:07.071636 2175536 command_runner.go:130] > RuntimeName:  containerd
	I0916 11:08:07.071655 2175536 command_runner.go:130] > RuntimeVersion:  1.7.22
	I0916 11:08:07.071673 2175536 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:08:07.074372 2175536 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 11:08:07.074497 2175536 ssh_runner.go:195] Run: containerd --version
	I0916 11:08:07.097053 2175536 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:08:07.097148 2175536 ssh_runner.go:195] Run: containerd --version
	I0916 11:08:07.119375 2175536 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:08:07.125432 2175536 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 11:08:07.127244 2175536 out.go:177]   - env NO_PROXY=192.168.58.2
	I0916 11:08:07.129462 2175536 cli_runner.go:164] Run: docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:08:07.145671 2175536 ssh_runner.go:195] Run: grep 192.168.58.1	host.minikube.internal$ /etc/hosts
	I0916 11:08:07.149762 2175536 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.58.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:08:07.161040 2175536 mustload.go:65] Loading cluster: multinode-890146
	I0916 11:08:07.161250 2175536 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:08:07.161520 2175536 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:08:07.179177 2175536 host.go:66] Checking if "multinode-890146" exists ...
	I0916 11:08:07.179488 2175536 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146 for IP: 192.168.58.3
	I0916 11:08:07.179502 2175536 certs.go:194] generating shared ca certs ...
	I0916 11:08:07.179518 2175536 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:08:07.179847 2175536 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 11:08:07.182772 2175536 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 11:08:07.182805 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:08:07.182852 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:08:07.182868 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:08:07.182880 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:08:07.182940 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 11:08:07.182980 2175536 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 11:08:07.182989 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:08:07.183013 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 11:08:07.183034 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:08:07.183055 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 11:08:07.183101 2175536 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:08:07.183128 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 11:08:07.183141 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:08:07.183152 2175536 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 11:08:07.183170 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:08:07.209946 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 11:08:07.235360 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:08:07.260765 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 11:08:07.285760 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 11:08:07.310087 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:08:07.335622 2175536 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 11:08:07.361368 2175536 ssh_runner.go:195] Run: openssl version
	I0916 11:08:07.366767 2175536 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:08:07.367237 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:08:07.376822 2175536 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:08:07.380555 2175536 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:08:07.380600 2175536 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:08:07.380650 2175536 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:08:07.389700 2175536 command_runner.go:130] > b5213941
	I0916 11:08:07.390146 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:08:07.399530 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 11:08:07.408966 2175536 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 11:08:07.412600 2175536 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:08:07.412639 2175536 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:08:07.412695 2175536 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 11:08:07.419633 2175536 command_runner.go:130] > 51391683
	I0916 11:08:07.420189 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 11:08:07.430005 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 11:08:07.439393 2175536 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 11:08:07.442833 2175536 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:08:07.442881 2175536 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:08:07.442961 2175536 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 11:08:07.449629 2175536 command_runner.go:130] > 3ec20f2e
	I0916 11:08:07.450113 2175536 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:08:07.459786 2175536 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:08:07.463161 2175536 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:08:07.463195 2175536 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:08:07.463250 2175536 kubeadm.go:934] updating node {m02 192.168.58.3 8443 v1.31.1 containerd false true} ...
	I0916 11:08:07.463350 2175536 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-890146-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.58.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:08:07.463424 2175536 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:08:07.471270 2175536 command_runner.go:130] > kubeadm
	I0916 11:08:07.471289 2175536 command_runner.go:130] > kubectl
	I0916 11:08:07.471294 2175536 command_runner.go:130] > kubelet
	I0916 11:08:07.472417 2175536 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:08:07.472512 2175536 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 11:08:07.481280 2175536 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (324 bytes)
	I0916 11:08:07.500033 2175536 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:08:07.518293 2175536 ssh_runner.go:195] Run: grep 192.168.58.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:08:07.521622 2175536 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.58.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:08:07.532421 2175536 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:08:07.616946 2175536 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:08:07.632778 2175536 host.go:66] Checking if "multinode-890146" exists ...
	I0916 11:08:07.633087 2175536 start.go:317] joinCluster: &{Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerN
ames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p
2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:08:07.633181 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0916 11:08:07.633232 2175536 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:08:07.655231 2175536 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:08:07.813899 2175536 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token ptf107.9si9q29neobz8pd1 --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 11:08:07.813944 2175536 start.go:343] trying to join worker node "m02" to cluster: &{Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}
	I0916 11:08:07.813977 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token ptf107.9si9q29neobz8pd1 --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=multinode-890146-m02"
	I0916 11:08:07.856516 2175536 command_runner.go:130] > [preflight] Running pre-flight checks
	I0916 11:08:07.867055 2175536 command_runner.go:130] > [preflight] The system verification failed. Printing the output from the verification:
	I0916 11:08:07.867081 2175536 command_runner.go:130] > KERNEL_VERSION: 5.15.0-1069-aws
	I0916 11:08:07.867087 2175536 command_runner.go:130] > OS: Linux
	I0916 11:08:07.867094 2175536 command_runner.go:130] > CGROUPS_CPU: enabled
	I0916 11:08:07.867100 2175536 command_runner.go:130] > CGROUPS_CPUACCT: enabled
	I0916 11:08:07.867106 2175536 command_runner.go:130] > CGROUPS_CPUSET: enabled
	I0916 11:08:07.867112 2175536 command_runner.go:130] > CGROUPS_DEVICES: enabled
	I0916 11:08:07.867125 2175536 command_runner.go:130] > CGROUPS_FREEZER: enabled
	I0916 11:08:07.867133 2175536 command_runner.go:130] > CGROUPS_MEMORY: enabled
	I0916 11:08:07.867139 2175536 command_runner.go:130] > CGROUPS_PIDS: enabled
	I0916 11:08:07.867147 2175536 command_runner.go:130] > CGROUPS_HUGETLB: enabled
	I0916 11:08:07.867153 2175536 command_runner.go:130] > CGROUPS_BLKIO: enabled
	I0916 11:08:07.961931 2175536 command_runner.go:130] > [preflight] Reading configuration from the cluster...
	I0916 11:08:07.961957 2175536 command_runner.go:130] > [preflight] FYI: You can look at this config file with 'kubectl -n kube-system get cm kubeadm-config -o yaml'
	I0916 11:08:08.004665 2175536 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0916 11:08:08.005488 2175536 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0916 11:08:08.005517 2175536 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0916 11:08:08.108491 2175536 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0916 11:08:09.610208 2175536 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.501897178s
	I0916 11:08:09.610236 2175536 command_runner.go:130] > [kubelet-start] Waiting for the kubelet to perform the TLS Bootstrap
	I0916 11:08:09.634985 2175536 command_runner.go:130] > This node has joined the cluster:
	I0916 11:08:09.635006 2175536 command_runner.go:130] > * Certificate signing request was sent to apiserver and a response was received.
	I0916 11:08:09.635013 2175536 command_runner.go:130] > * The Kubelet was informed of the new secure connection details.
	I0916 11:08:09.635020 2175536 command_runner.go:130] > Run 'kubectl get nodes' on the control-plane to see this node join the cluster.
	I0916 11:08:09.638528 2175536 command_runner.go:130] ! 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:08:09.638559 2175536 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:08:09.638576 2175536 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.31.1:$PATH" kubeadm join control-plane.minikube.internal:8443 --token ptf107.9si9q29neobz8pd1 --discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=multinode-890146-m02": (1.824584701s)
	I0916 11:08:09.638594 2175536 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0916 11:08:09.826606 2175536 command_runner.go:130] ! Created symlink /etc/systemd/system/multi-user.target.wants/kubelet.service → /lib/systemd/system/kubelet.service.
	I0916 11:08:09.826836 2175536 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-890146-m02 minikube.k8s.io/updated_at=2024_09_16T11_08_09_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=multinode-890146 minikube.k8s.io/primary=false
	I0916 11:08:09.930074 2175536 command_runner.go:130] > node/multinode-890146-m02 labeled
	I0916 11:08:09.934376 2175536 start.go:319] duration metric: took 2.301283567s to joinCluster
	I0916 11:08:09.934475 2175536 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}
	I0916 11:08:09.934886 2175536 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:08:09.936448 2175536 out.go:177] * Verifying Kubernetes components...
	I0916 11:08:09.938297 2175536 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:08:10.046015 2175536 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:08:10.061308 2175536 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:08:10.061600 2175536 kapi.go:59] client config for multinode-890146: &rest.Config{Host:"https://192.168.58.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:08:10.061895 2175536 node_ready.go:35] waiting up to 6m0s for node "multinode-890146-m02" to be "Ready" ...
	I0916 11:08:10.061985 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:10.061997 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.062006 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.062012 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.064827 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.064861 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.064870 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.064875 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.064879 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.064883 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.064886 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.064889 2175536 round_trippers.go:580]     Audit-Id: b1ad4d49-0b33-40c0-acd8-87c788f26df9
	I0916 11:08:10.065007 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"499","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4486 chars]
	I0916 11:08:10.562184 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:10.562211 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.562240 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.562245 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.564636 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.564661 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.564669 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.564673 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.564678 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.564681 2175536 round_trippers.go:580]     Audit-Id: 1cf414cc-b1f3-4920-a9dc-a885ace12ed0
	I0916 11:08:10.564684 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.564689 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.564815 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"503","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4891 chars]
	I0916 11:08:10.565187 2175536 node_ready.go:49] node "multinode-890146-m02" has status "Ready":"True"
	I0916 11:08:10.565201 2175536 node_ready.go:38] duration metric: took 503.286116ms for node "multinode-890146-m02" to be "Ready" ...
	I0916 11:08:10.565210 2175536 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:08:10.565273 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:08:10.565279 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.565287 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.565291 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.568523 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:08:10.568590 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.568624 2175536 round_trippers.go:580]     Audit-Id: 6a0b9f9e-22ad-4a40-83aa-450cc457cb96
	I0916 11:08:10.568635 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.568669 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.568680 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.568683 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.568686 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.569951 2175536 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"503"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"456","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 69157 chars]
	I0916 11:08:10.573175 2175536 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.573304 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:08:10.573318 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.573327 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.573333 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.576950 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:08:10.576973 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.576982 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.576987 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.576990 2175536 round_trippers.go:580]     Audit-Id: 03285052-af0c-43bf-b0c2-a3a2a6c9acdc
	I0916 11:08:10.576993 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.576996 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.577000 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.577117 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"456","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6480 chars]
	I0916 11:08:10.577682 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:10.577693 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.577701 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.577705 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.580094 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.580157 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.580183 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.580203 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.580214 2175536 round_trippers.go:580]     Audit-Id: 0ddfb439-4011-4e4c-823c-4e38896653d8
	I0916 11:08:10.580218 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.580224 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.580227 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.580393 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:08:10.580772 2175536 pod_ready.go:93] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:10.580794 2175536 pod_ready.go:82] duration metric: took 7.572236ms for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.580806 2175536 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.580874 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-890146
	I0916 11:08:10.580885 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.580893 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.580899 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.583233 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.583254 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.583262 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.583269 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.583279 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.583282 2175536 round_trippers.go:580]     Audit-Id: 9fe472de-f4b3-4a16-8cec-5efd0c165a4c
	I0916 11:08:10.583285 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.583287 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.583459 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-890146","namespace":"kube-system","uid":"59c960ab-f6dd-4ed3-856c-ba2a02295b12","resourceVersion":"327","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.58.2:2379","kubernetes.io/config.hash":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.mirror":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.seen":"2024-09-16T11:07:32.783608135Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-cl
ient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6440 chars]
	I0916 11:08:10.583987 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:10.584007 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.584017 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.584022 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.586155 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.586178 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.586186 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.586190 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.586192 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.586197 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.586201 2175536 round_trippers.go:580]     Audit-Id: b682bbbd-8dd8-4a62-b11c-cc6e100901c7
	I0916 11:08:10.586203 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.586399 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:08:10.586855 2175536 pod_ready.go:93] pod "etcd-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:10.586883 2175536 pod_ready.go:82] duration metric: took 6.065667ms for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.586910 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.586984 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-890146
	I0916 11:08:10.586996 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.587005 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.587009 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.589233 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.589254 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.589262 2175536 round_trippers.go:580]     Audit-Id: 0144dd60-5378-4178-acb9-4c51669f48d4
	I0916 11:08:10.589265 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.589268 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.589270 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.589273 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.589276 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.589505 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-890146","namespace":"kube-system","uid":"9846229d-7227-453f-8c30-697aaba61648","resourceVersion":"432","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.58.2:8443","kubernetes.io/config.hash":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.mirror":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.seen":"2024-09-16T11:07:32.783610957Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kube
rnetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 8518 chars]
	I0916 11:08:10.590104 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:10.590123 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.590132 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.590137 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.592238 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.592274 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.592283 2175536 round_trippers.go:580]     Audit-Id: 89508ee3-1f81-43d8-bf7c-0da4e90c1ac9
	I0916 11:08:10.592288 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.592293 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.592297 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.592300 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.592303 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.592688 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:08:10.593085 2175536 pod_ready.go:93] pod "kube-apiserver-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:10.593105 2175536 pod_ready.go:82] duration metric: took 6.187685ms for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.593116 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.593229 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-890146
	I0916 11:08:10.593242 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.593251 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.593255 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.595341 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.595363 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.595371 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.595374 2175536 round_trippers.go:580]     Audit-Id: c219233d-4a00-41ab-9483-14f0fc685b13
	I0916 11:08:10.595377 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.595382 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.595384 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.595387 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.595647 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-890146","namespace":"kube-system","uid":"421b15a5-a8e2-4631-bf18-1592c8747b09","resourceVersion":"436","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.mirror":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.seen":"2024-09-16T11:07:32.783594137Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8093 chars]
	I0916 11:08:10.596184 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:10.596203 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.596211 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.596214 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.598211 2175536 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:08:10.598232 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.598240 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.598245 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.598248 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.598252 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.598257 2175536 round_trippers.go:580]     Audit-Id: 3b6b0777-42b3-4f85-af69-59a9bc2435d2
	I0916 11:08:10.598260 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.598565 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:08:10.598988 2175536 pod_ready.go:93] pod "kube-controller-manager-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:10.599008 2175536 pod_ready.go:82] duration metric: took 5.878714ms for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.599020 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:10.762322 2175536 request.go:632] Waited for 163.220346ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:10.762393 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:10.762403 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.762412 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.762419 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.764731 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.764804 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.764827 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.764846 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.764860 2175536 round_trippers.go:580]     Audit-Id: 7e4ac130-437c-445a-9474-91da24dc70ab
	I0916 11:08:10.764899 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.764916 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.764941 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.765049 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"492","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 4018 chars]
	I0916 11:08:10.962847 2175536 request.go:632] Waited for 197.357703ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:10.962915 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:10.962925 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:10.962944 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:10.962959 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:10.965292 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:10.965318 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:10.965327 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:10.965333 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:10.965338 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:10 GMT
	I0916 11:08:10.965342 2175536 round_trippers.go:580]     Audit-Id: ea137220-6640-48b8-8a78-ef9c9366d0b3
	I0916 11:08:10.965353 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:10.965356 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:10.965661 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"503","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4891 chars]
	I0916 11:08:11.162821 2175536 request.go:632] Waited for 63.218207ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:11.162887 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:11.162897 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:11.162906 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:11.162914 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:11.171386 2175536 round_trippers.go:574] Response Status: 200 OK in 8 milliseconds
	I0916 11:08:11.171415 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:11.171424 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:11.171429 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:11.171432 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:11.171436 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:11.171440 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:11 GMT
	I0916 11:08:11.171442 2175536 round_trippers.go:580]     Audit-Id: e29516a2-3d43-431c-9ab7-d2f8bb7c2f57
	I0916 11:08:11.171940 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"492","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 4018 chars]
	I0916 11:08:11.362795 2175536 request.go:632] Waited for 190.415339ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:11.362944 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:11.362975 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:11.362998 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:11.363012 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:11.365404 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:11.365430 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:11.365445 2175536 round_trippers.go:580]     Audit-Id: 982f9db0-7d1b-432d-87f9-5b66bee9fce9
	I0916 11:08:11.365452 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:11.365455 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:11.365458 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:11.365461 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:11.365469 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:11 GMT
	I0916 11:08:11.365777 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"503","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4891 chars]
	I0916 11:08:11.600294 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:11.600413 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:11.600450 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:11.600506 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:11.604249 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:08:11.604290 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:11.604299 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:11.604303 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:11.604306 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:11.604311 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:11 GMT
	I0916 11:08:11.604314 2175536 round_trippers.go:580]     Audit-Id: 5b13aca1-e689-4ed6-a98d-6ed21a617fd1
	I0916 11:08:11.604317 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:11.604910 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"505","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6209 chars]
	I0916 11:08:11.762785 2175536 request.go:632] Waited for 157.323121ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:11.762931 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:11.762950 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:11.762959 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:11.762966 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:11.767332 2175536 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:08:11.767401 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:11.767423 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:11.767440 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:11 GMT
	I0916 11:08:11.767456 2175536 round_trippers.go:580]     Audit-Id: b06c9cbf-8916-4564-b607-b4b63aa09b76
	I0916 11:08:11.767481 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:11.767501 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:11.767516 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:11.767699 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"503","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4891 chars]
	I0916 11:08:12.099720 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:12.099748 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:12.099758 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:12.099762 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:12.102443 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:12.102470 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:12.102483 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:12.102488 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:12.102491 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:12.102495 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:12 GMT
	I0916 11:08:12.102498 2175536 round_trippers.go:580]     Audit-Id: c5cab472-8457-492b-98fb-f0936be34abe
	I0916 11:08:12.102500 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:12.102796 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"505","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6209 chars]
	I0916 11:08:12.162653 2175536 request.go:632] Waited for 59.256414ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:12.162771 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:12.162778 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:12.162787 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:12.162794 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:12.165185 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:12.165207 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:12.165216 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:12.165220 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:12.165222 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:12.165225 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:12 GMT
	I0916 11:08:12.165227 2175536 round_trippers.go:580]     Audit-Id: 757781fc-198b-49dc-8b83-283ae57f1413
	I0916 11:08:12.165230 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:12.165610 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"503","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4891 chars]
	I0916 11:08:12.599287 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:08:12.599312 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:12.599322 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:12.599328 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:12.601604 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:12.601626 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:12.601634 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:12.601640 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:12.601643 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:12 GMT
	I0916 11:08:12.601646 2175536 round_trippers.go:580]     Audit-Id: e7b073e2-d3ba-4abf-aef2-4c6807f9795d
	I0916 11:08:12.601649 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:12.601651 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:12.601962 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"519","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6183 chars]
	I0916 11:08:12.602505 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:08:12.602523 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:12.602532 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:12.602538 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:12.604593 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:12.604620 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:12.604628 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:12.604633 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:12.604637 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:12 GMT
	I0916 11:08:12.604639 2175536 round_trippers.go:580]     Audit-Id: 198fd972-db21-44cc-9dac-ea54d2099356
	I0916 11:08:12.604643 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:12.604651 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:12.604965 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"503","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 4891 chars]
	I0916 11:08:12.605324 2175536 pod_ready.go:93] pod "kube-proxy-59f9h" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:12.605344 2175536 pod_ready.go:82] duration metric: took 2.006315508s for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:12.605356 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:12.762778 2175536 request.go:632] Waited for 157.286248ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:08:12.762838 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:08:12.762845 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:12.762853 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:12.762863 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:12.765379 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:12.765447 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:12.765484 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:12.765506 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:12 GMT
	I0916 11:08:12.765522 2175536 round_trippers.go:580]     Audit-Id: 3ae5b81b-9737-4d7d-9c3c-4cc2ca8354c4
	I0916 11:08:12.765539 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:12.765567 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:12.765587 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:12.765750 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-fm5qr","generateName":"kube-proxy-","namespace":"kube-system","uid":"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73","resourceVersion":"412","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6175 chars]
	I0916 11:08:12.962648 2175536 request.go:632] Waited for 196.340469ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:12.962772 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:12.962786 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:12.962796 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:12.962800 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:12.965196 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:12.965262 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:12.965285 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:12.965302 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:12 GMT
	I0916 11:08:12.965317 2175536 round_trippers.go:580]     Audit-Id: e1e2d869-d8ed-4096-a5f3-2a88a7eaf374
	I0916 11:08:12.965347 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:12.965369 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:12.965378 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:12.965591 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:08:12.965993 2175536 pod_ready.go:93] pod "kube-proxy-fm5qr" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:12.966013 2175536 pod_ready.go:82] duration metric: took 360.649916ms for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:12.966028 2175536 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:13.162762 2175536 request.go:632] Waited for 196.605419ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:08:13.162831 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:08:13.162868 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:13.162880 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:13.162885 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:13.165421 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:13.165449 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:13.165458 2175536 round_trippers.go:580]     Audit-Id: 2dcf7ab0-d2e0-4cf4-894b-326dc18f9cc6
	I0916 11:08:13.165464 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:13.165469 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:13.165477 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:13.165483 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:13.165487 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:13 GMT
	I0916 11:08:13.165816 2175536 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"438","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 4975 chars]
	I0916 11:08:13.362859 2175536 request.go:632] Waited for 196.441481ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:13.362932 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:08:13.362941 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:13.362950 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:13.362957 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:13.365475 2175536 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:08:13.365510 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:13.365520 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:13.365524 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:13 GMT
	I0916 11:08:13.365528 2175536 round_trippers.go:580]     Audit-Id: 27e6524a-64b8-4349-893b-6758c536e036
	I0916 11:08:13.365532 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:13.365535 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:13.365539 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:13.365951 2175536 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5103 chars]
	I0916 11:08:13.366412 2175536 pod_ready.go:93] pod "kube-scheduler-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:08:13.366439 2175536 pod_ready.go:82] duration metric: took 400.396713ms for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:08:13.366452 2175536 pod_ready.go:39] duration metric: took 2.80123186s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:08:13.366466 2175536 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:08:13.366549 2175536 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:08:13.378668 2175536 system_svc.go:56] duration metric: took 12.192528ms WaitForService to wait for kubelet
	I0916 11:08:13.378764 2175536 kubeadm.go:582] duration metric: took 3.44425357s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:08:13.378787 2175536 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:08:13.562293 2175536 request.go:632] Waited for 183.415326ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes
	I0916 11:08:13.562353 2175536 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes
	I0916 11:08:13.562359 2175536 round_trippers.go:469] Request Headers:
	I0916 11:08:13.562368 2175536 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:08:13.562377 2175536 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:08:13.565435 2175536 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:08:13.565466 2175536 round_trippers.go:577] Response Headers:
	I0916 11:08:13.565486 2175536 round_trippers.go:580]     Audit-Id: c7358d21-64a7-4fae-bed0-f5f7f8c5324c
	I0916 11:08:13.565491 2175536 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:08:13.565494 2175536 round_trippers.go:580]     Content-Type: application/json
	I0916 11:08:13.565535 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:08:13.565544 2175536 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:08:13.565547 2175536 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:08:13 GMT
	I0916 11:08:13.566135 2175536 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"520"},"items":[{"metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"440","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"manag
edFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1", [truncated 11039 chars]
	I0916 11:08:13.566845 2175536 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:08:13.566873 2175536 node_conditions.go:123] node cpu capacity is 2
	I0916 11:08:13.566892 2175536 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:08:13.566897 2175536 node_conditions.go:123] node cpu capacity is 2
	I0916 11:08:13.566902 2175536 node_conditions.go:105] duration metric: took 188.106468ms to run NodePressure ...
	I0916 11:08:13.566914 2175536 start.go:241] waiting for startup goroutines ...
	I0916 11:08:13.566951 2175536 start.go:255] writing updated cluster config ...
	I0916 11:08:13.567260 2175536 ssh_runner.go:195] Run: rm -f paused
	I0916 11:08:13.574763 2175536 out.go:177] * Done! kubectl is now configured to use "multinode-890146" cluster and "default" namespace by default
	E0916 11:08:13.576452 2175536 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	24b8d7a28fb62       89a35e2ebb6b9       About a minute ago   Running             busybox                   0                   b4e431597f321       busybox-7dff88458-hf6zl
	e8a9035126acc       2f6c962e7b831       About a minute ago   Running             coredns                   0                   4d75eb3d0406a       coredns-7c65d6cfc9-vp22b
	0ca1a17f49909       ba04bb24b9575       About a minute ago   Running             storage-provisioner       0                   6e9271efa5691       storage-provisioner
	eccab3e428039       6a23fa8fd2b78       About a minute ago   Running             kindnet-cni               0                   c60887e75f823       kindnet-dbrhk
	88800ca3adcda       24a140c548c07       About a minute ago   Running             kube-proxy                0                   a71ab4f91b123       kube-proxy-fm5qr
	e8e11b0a6506f       27e3830e14027       About a minute ago   Running             etcd                      0                   d29e1a2d28295       etcd-multinode-890146
	305b8895a3440       d3f53a98c0a9d       About a minute ago   Running             kube-apiserver            0                   2812a818d9d32       kube-apiserver-multinode-890146
	424e6c1030bdc       7f8aa378bb47d       About a minute ago   Running             kube-scheduler            0                   8856cda765ae4       kube-scheduler-multinode-890146
	9d6ccf43cf5a5       279f381cb3736       About a minute ago   Running             kube-controller-manager   0                   8ad0b604e598f       kube-controller-manager-multinode-890146
	
	
	==> containerd <==
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.875899574Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.875914483Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.876019738Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.936171123Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-7c65d6cfc9-vp22b,Uid:a6adb735-448b-480b-aba1-3ce4d56c6fc7,Namespace:kube-system,Attempt:0,} returns sandbox id \"4d75eb3d0406a0f1516c1dd540e0be271fd8a5b21b2779e789e502b7efe11eb9\""
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.942107322Z" level=info msg="CreateContainer within sandbox \"4d75eb3d0406a0f1516c1dd540e0be271fd8a5b21b2779e789e502b7efe11eb9\" for container &ContainerMetadata{Name:coredns,Attempt:0,}"
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.961549798Z" level=info msg="CreateContainer within sandbox \"4d75eb3d0406a0f1516c1dd540e0be271fd8a5b21b2779e789e502b7efe11eb9\" for &ContainerMetadata{Name:coredns,Attempt:0,} returns container id \"e8a9035126acce637354968a7c4092e855f0da6d2a3f39d1f94b5795e1d5079b\""
	Sep 16 11:07:52 multinode-890146 containerd[823]: time="2024-09-16T11:07:52.963144161Z" level=info msg="StartContainer for \"e8a9035126acce637354968a7c4092e855f0da6d2a3f39d1f94b5795e1d5079b\""
	Sep 16 11:07:53 multinode-890146 containerd[823]: time="2024-09-16T11:07:53.021587453Z" level=info msg="StartContainer for \"e8a9035126acce637354968a7c4092e855f0da6d2a3f39d1f94b5795e1d5079b\" returns successfully"
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.708375311Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:busybox-7dff88458-hf6zl,Uid:8e7abaaa-be47-456f-9980-53cbfcd75f48,Namespace:default,Attempt:0,}"
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.752948331Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.753039104Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.753056466Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.754070729Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.804334443Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:busybox-7dff88458-hf6zl,Uid:8e7abaaa-be47-456f-9980-53cbfcd75f48,Namespace:default,Attempt:0,} returns sandbox id \"b4e431597f32168c0494b111ad4e32bd08acefca6857024028a1a1fbe5f51839\""
	Sep 16 11:08:14 multinode-890146 containerd[823]: time="2024-09-16T11:08:14.809715248Z" level=info msg="PullImage \"gcr.io/k8s-minikube/busybox:1.28\""
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.525376043Z" level=info msg="ImageCreate event name:\"gcr.io/k8s-minikube/busybox:1.28\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.526574215Z" level=info msg="stop pulling image gcr.io/k8s-minikube/busybox:1.28: active requests=0, bytes read=766310"
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.528358198Z" level=info msg="ImageCreate event name:\"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.531279979Z" level=info msg="ImageCreate event name:\"gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12\"  labels:{key:\"io.cri-containerd.image\"  value:\"managed\"}"
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.531999508Z" level=info msg="Pulled image \"gcr.io/k8s-minikube/busybox:1.28\" with image id \"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\", repo tag \"gcr.io/k8s-minikube/busybox:1.28\", repo digest \"gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12\", size \"764554\" in 1.722111371s"
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.532118227Z" level=info msg="PullImage \"gcr.io/k8s-minikube/busybox:1.28\" returns image reference \"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\""
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.536596645Z" level=info msg="CreateContainer within sandbox \"b4e431597f32168c0494b111ad4e32bd08acefca6857024028a1a1fbe5f51839\" for container &ContainerMetadata{Name:busybox,Attempt:0,}"
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.556009122Z" level=info msg="CreateContainer within sandbox \"b4e431597f32168c0494b111ad4e32bd08acefca6857024028a1a1fbe5f51839\" for &ContainerMetadata{Name:busybox,Attempt:0,} returns container id \"24b8d7a28fb62d5eed5cac16c46c9067bbd582c064a95212b74b18cbaffddfeb\""
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.557019119Z" level=info msg="StartContainer for \"24b8d7a28fb62d5eed5cac16c46c9067bbd582c064a95212b74b18cbaffddfeb\""
	Sep 16 11:08:16 multinode-890146 containerd[823]: time="2024-09-16T11:08:16.613496602Z" level=info msg="StartContainer for \"24b8d7a28fb62d5eed5cac16c46c9067bbd582c064a95212b74b18cbaffddfeb\" returns successfully"
	
	
	==> coredns [e8a9035126acce637354968a7c4092e855f0da6d2a3f39d1f94b5795e1d5079b] <==
	[INFO] 10.244.0.3:52581 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.00009111s
	[INFO] 10.244.1.2:42040 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000125882s
	[INFO] 10.244.1.2:35392 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.002402252s
	[INFO] 10.244.1.2:42007 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000093398s
	[INFO] 10.244.1.2:58148 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000099052s
	[INFO] 10.244.1.2:60433 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001076606s
	[INFO] 10.244.1.2:45965 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000079565s
	[INFO] 10.244.1.2:44644 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000090806s
	[INFO] 10.244.1.2:36880 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000072689s
	[INFO] 10.244.0.3:35082 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000098806s
	[INFO] 10.244.0.3:38242 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000109899s
	[INFO] 10.244.0.3:60732 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.00008109s
	[INFO] 10.244.0.3:44313 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000072985s
	[INFO] 10.244.1.2:60136 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000150612s
	[INFO] 10.244.1.2:59383 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000110605s
	[INFO] 10.244.1.2:38945 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000092808s
	[INFO] 10.244.1.2:35665 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000081345s
	[INFO] 10.244.0.3:36942 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000120467s
	[INFO] 10.244.0.3:55441 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000108315s
	[INFO] 10.244.0.3:38725 - 5 "PTR IN 1.58.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000129336s
	[INFO] 10.244.0.3:40340 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000172389s
	[INFO] 10.244.1.2:40345 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000111507s
	[INFO] 10.244.1.2:51062 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.0000773s
	[INFO] 10.244.1.2:40631 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000083733s
	[INFO] 10.244.1.2:39196 - 5 "PTR IN 1.58.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000074067s
	
	
	==> describe nodes <==
	Name:               multinode-890146
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-890146
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-890146
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T11_07_33_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:07:30 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-890146
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:09:14 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:08:34 +0000   Mon, 16 Sep 2024 11:07:27 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:08:34 +0000   Mon, 16 Sep 2024 11:07:27 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:08:34 +0000   Mon, 16 Sep 2024 11:07:27 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:08:34 +0000   Mon, 16 Sep 2024 11:07:30 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.58.2
	  Hostname:    multinode-890146
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 858ccd60bec74dcb8c460c7772b0d996
	  System UUID:                2cb24a37-7b71-4957-b8fd-d0da5c3f8b7a
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-hf6zl                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         63s
	  kube-system                 coredns-7c65d6cfc9-vp22b                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     100s
	  kube-system                 etcd-multinode-890146                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         104s
	  kube-system                 kindnet-dbrhk                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      100s
	  kube-system                 kube-apiserver-multinode-890146             250m (12%)    0 (0%)      0 (0%)           0 (0%)         104s
	  kube-system                 kube-controller-manager-multinode-890146    200m (10%)    0 (0%)      0 (0%)           0 (0%)         104s
	  kube-system                 kube-proxy-fm5qr                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         100s
	  kube-system                 kube-scheduler-multinode-890146             100m (5%)     0 (0%)      0 (0%)           0 (0%)         106s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         99s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                  From             Message
	  ----     ------                   ----                 ----             -------
	  Normal   Starting                 98s                  kube-proxy       
	  Normal   NodeAllocatableEnforced  112s                 kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 112s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  112s (x8 over 112s)  kubelet          Node multinode-890146 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    112s (x7 over 112s)  kubelet          Node multinode-890146 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     112s (x7 over 112s)  kubelet          Node multinode-890146 status is now: NodeHasSufficientPID
	  Normal   Starting                 112s                 kubelet          Starting kubelet.
	  Normal   Starting                 105s                 kubelet          Starting kubelet.
	  Warning  CgroupV1                 105s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  105s                 kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  104s                 kubelet          Node multinode-890146 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    104s                 kubelet          Node multinode-890146 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     104s                 kubelet          Node multinode-890146 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           101s                 node-controller  Node multinode-890146 event: Registered Node multinode-890146 in Controller
	
	
	Name:               multinode-890146-m02
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-890146-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-890146
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T11_08_09_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:08:09 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-890146-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:09:10 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:08:39 +0000   Mon, 16 Sep 2024 11:08:09 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:08:39 +0000   Mon, 16 Sep 2024 11:08:09 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:08:39 +0000   Mon, 16 Sep 2024 11:08:09 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:08:39 +0000   Mon, 16 Sep 2024 11:08:10 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.58.3
	  Hostname:    multinode-890146-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 89c2f3455f224166b20833d27ea5ec88
	  System UUID:                afe70f4d-0cb5-4f79-97b8-28a81db2fa30
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                       CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                       ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-wrnfh    0 (0%)        0 (0%)      0 (0%)           0 (0%)         63s
	  kube-system                 kindnet-4sjj6              100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      68s
	  kube-system                 kube-proxy-59f9h           0 (0%)        0 (0%)      0 (0%)           0 (0%)         68s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   Starting                 65s                kube-proxy       
	  Warning  CgroupV1                 68s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  68s (x2 over 68s)  kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    68s (x2 over 68s)  kubelet          Node multinode-890146-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     68s (x2 over 68s)  kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  68s                kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeReady                67s                kubelet          Node multinode-890146-m02 status is now: NodeReady
	  Normal   RegisteredNode           66s                node-controller  Node multinode-890146-m02 event: Registered Node multinode-890146-m02 in Controller
	
	
	Name:               multinode-890146-m03
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-890146-m03
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-890146
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T11_08_46_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:08:46 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-890146-m03
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:09:14 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:09:14 +0000   Mon, 16 Sep 2024 11:08:46 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:09:14 +0000   Mon, 16 Sep 2024 11:08:46 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:09:14 +0000   Mon, 16 Sep 2024 11:08:46 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:09:14 +0000   Mon, 16 Sep 2024 11:08:47 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.58.4
	  Hostname:    multinode-890146-m03
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 bf51d66b53ab4a7da7165fe3128c7315
	  System UUID:                04a69dc2-fb3b-47af-837f-29ac0a7025b8
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.2.0/24
	PodCIDRs:                     10.244.2.0/24
	Non-terminated Pods:          (2 in total)
	  Namespace                   Name                CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                ------------  ----------  ---------------  -------------  ---
	  kube-system                 kindnet-ndgrk       100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      31s
	  kube-system                 kube-proxy-vl27g    0 (0%)        0 (0%)      0 (0%)           0 (0%)         31s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   Starting                 28s                kube-proxy       
	  Warning  CgroupV1                 32s                kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   RegisteredNode           31s                node-controller  Node multinode-890146-m03 event: Registered Node multinode-890146-m03 in Controller
	  Normal   NodeHasSufficientMemory  31s (x2 over 31s)  kubelet          Node multinode-890146-m03 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    31s (x2 over 31s)  kubelet          Node multinode-890146-m03 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     31s (x2 over 31s)  kubelet          Node multinode-890146-m03 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  31s                kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeReady                30s                kubelet          Node multinode-890146-m03 status is now: NodeReady
	
	
	==> dmesg <==
	
	
	==> etcd [e8e11b0a6506f9d34c5800c4a5a6bcc8b9f3225a3487a3c437bc87d0b0aaf53d] <==
	{"level":"info","ts":"2024-09-16T11:07:26.598384Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T11:07:26.598542Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.58.2:2380"}
	{"level":"info","ts":"2024-09-16T11:07:26.598739Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.58.2:2380"}
	{"level":"info","ts":"2024-09-16T11:07:26.599020Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"b2c6679ac05f2cf1","initial-advertise-peer-urls":["https://192.168.58.2:2380"],"listen-peer-urls":["https://192.168.58.2:2380"],"advertise-client-urls":["https://192.168.58.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.58.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T11:07:26.599063Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T11:07:26.816839Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T11:07:26.816896Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T11:07:26.816925Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 received MsgPreVoteResp from b2c6679ac05f2cf1 at term 1"}
	{"level":"info","ts":"2024-09-16T11:07:26.816938Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T11:07:26.816946Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 received MsgVoteResp from b2c6679ac05f2cf1 at term 2"}
	{"level":"info","ts":"2024-09-16T11:07:26.816957Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became leader at term 2"}
	{"level":"info","ts":"2024-09-16T11:07:26.816966Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: b2c6679ac05f2cf1 elected leader b2c6679ac05f2cf1 at term 2"}
	{"level":"info","ts":"2024-09-16T11:07:26.818827Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:07:26.822083Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"b2c6679ac05f2cf1","local-member-attributes":"{Name:multinode-890146 ClientURLs:[https://192.168.58.2:2379]}","request-path":"/0/members/b2c6679ac05f2cf1/attributes","cluster-id":"3a56e4ca95e2355c","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:07:26.822228Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:07:26.823261Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:07:26.824339Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T11:07:26.830909Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:07:26.831002Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"3a56e4ca95e2355c","local-member-id":"b2c6679ac05f2cf1","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:07:26.831112Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:07:26.831144Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:07:26.831564Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:07:26.831585Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T11:07:26.840842Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:07:26.842343Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.58.2:2379"}
	
	
	==> kernel <==
	 11:09:17 up 1 day, 14:51,  0 users,  load average: 1.32, 2.11, 2.13
	Linux multinode-890146 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [eccab3e428039af99fd1b2378ae8fd52f2837469955a8b78fd8b72f906813586] <==
	I0916 11:08:29.337042       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:08:29.337134       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:08:39.346867       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:08:39.347023       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:08:39.347190       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:08:39.347206       1 main.go:299] handling current node
	I0916 11:08:49.340449       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:08:49.340703       1 main.go:299] handling current node
	I0916 11:08:49.340775       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:08:49.340845       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:08:49.341113       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:08:49.341127       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	I0916 11:08:49.341182       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.2.0/24 Src: <nil> Gw: 192.168.58.4 Flags: [] Table: 0} 
	I0916 11:08:59.342099       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:08:59.342136       1 main.go:299] handling current node
	I0916 11:08:59.342153       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:08:59.342160       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:08:59.342285       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:08:59.342291       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	I0916 11:09:09.342281       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:09:09.342413       1 main.go:299] handling current node
	I0916 11:09:09.342453       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:09:09.342484       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:09:09.342714       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:09:09.342761       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	
	
	==> kube-apiserver [305b8895a34401a4626618470969b6ca3b591de13b0d36af0b2b2b23096ac46b] <==
	E0916 11:07:30.119633       1 controller.go:148] "Unhandled Error" err="while syncing ConfigMap \"kube-system/kube-apiserver-legacy-service-account-token-tracking\", err: namespaces \"kube-system\" not found" logger="UnhandledError"
	I0916 11:07:30.130522       1 controller.go:615] quota admission added evaluator for: namespaces
	I0916 11:07:30.311421       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:07:30.828773       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0916 11:07:30.837047       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0916 11:07:30.837071       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:07:31.543409       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 11:07:31.599890       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 11:07:31.699349       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 11:07:31.710041       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.58.2]
	I0916 11:07:31.711701       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:07:31.718520       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 11:07:31.982310       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 11:07:32.906044       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 11:07:32.918442       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 11:07:32.931656       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 11:07:36.841575       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0916 11:07:37.332525       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	E0916 11:08:30.067502       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:36938: use of closed network connection
	E0916 11:08:30.295579       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:36950: use of closed network connection
	E0916 11:08:30.506756       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:36960: use of closed network connection
	E0916 11:08:30.713694       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:36984: use of closed network connection
	E0916 11:08:31.123853       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:37034: use of closed network connection
	E0916 11:08:31.466128       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:37056: use of closed network connection
	E0916 11:08:32.100602       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:37082: use of closed network connection
	
	
	==> kube-controller-manager [9d6ccf43cf5a5c28d56e616702330e693dc76d6773c7cc3e02e94f189195689b] <==
	I0916 11:08:14.387728       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="50.768606ms"
	I0916 11:08:14.452119       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="64.332523ms"
	I0916 11:08:14.452218       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="51.766µs"
	I0916 11:08:14.468869       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="65.157µs"
	I0916 11:08:16.989815       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="7.491662ms"
	I0916 11:08:16.989901       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="44.176µs"
	I0916 11:08:29.309361       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="12.824827ms"
	I0916 11:08:29.310058       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="648.874µs"
	I0916 11:08:34.122844       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146"
	I0916 11:08:39.809520       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:08:46.203029       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:08:46.203855       1 actual_state_of_world.go:540] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"multinode-890146-m03\" does not exist"
	I0916 11:08:46.218321       1 range_allocator.go:422] "Set node PodCIDR" logger="node-ipam-controller" node="multinode-890146-m03" podCIDRs=["10.244.2.0/24"]
	I0916 11:08:46.218361       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.219096       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.235698       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.302636       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.605228       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.989720       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="multinode-890146-m03"
	I0916 11:08:47.080416       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:47.243737       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:08:47.243773       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:47.254901       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:52.008891       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:09:14.825397       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	
	
	==> kube-proxy [88800ca3adcdad421bba0ffcef548a966eeb5c210e5453a2ba8470a9e90ea01e] <==
	I0916 11:07:38.741936       1 server_linux.go:66] "Using iptables proxy"
	I0916 11:07:38.860580       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.58.2"]
	E0916 11:07:38.860641       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 11:07:38.962890       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 11:07:38.962952       1 server_linux.go:169] "Using iptables Proxier"
	I0916 11:07:38.967180       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 11:07:38.968664       1 server.go:483] "Version info" version="v1.31.1"
	I0916 11:07:38.968689       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:07:38.978327       1 config.go:199] "Starting service config controller"
	I0916 11:07:38.978605       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 11:07:38.979008       1 config.go:105] "Starting endpoint slice config controller"
	I0916 11:07:38.979892       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 11:07:38.980656       1 config.go:328] "Starting node config controller"
	I0916 11:07:38.980803       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 11:07:39.083387       1 shared_informer.go:320] Caches are synced for node config
	I0916 11:07:39.083600       1 shared_informer.go:320] Caches are synced for service config
	I0916 11:07:39.083628       1 shared_informer.go:320] Caches are synced for endpoint slice config
	
	
	==> kube-scheduler [424e6c1030bdc58751fd76a7652c31e5bd7dff844d888049b87815ddfaecc90b] <==
	I0916 11:07:29.575629       1 serving.go:386] Generated self-signed cert in-memory
	I0916 11:07:32.225115       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 11:07:32.225654       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:07:32.234302       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 11:07:32.234494       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 11:07:32.234575       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 11:07:32.234645       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 11:07:32.244028       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 11:07:32.245351       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 11:07:32.244860       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 11:07:32.254326       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 11:07:32.335551       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 11:07:32.355020       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 11:07:32.355033       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.256142    1515 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"0a9ea55e72ca505abc76bafa32448414335d874337283bd8fae2bd8a3d0c52da\": failed to find network info for sandbox \"0a9ea55e72ca505abc76bafa32448414335d874337283bd8fae2bd8a3d0c52da\"" pod="kube-system/coredns-7c65d6cfc9-vp22b"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.256194    1515 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"0a9ea55e72ca505abc76bafa32448414335d874337283bd8fae2bd8a3d0c52da\": failed to find network info for sandbox \"0a9ea55e72ca505abc76bafa32448414335d874337283bd8fae2bd8a3d0c52da\"" pod="kube-system/coredns-7c65d6cfc9-vp22b"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.256263    1515 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-7c65d6cfc9-vp22b_kube-system(a6adb735-448b-480b-aba1-3ce4d56c6fc7)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-7c65d6cfc9-vp22b_kube-system(a6adb735-448b-480b-aba1-3ce4d56c6fc7)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"0a9ea55e72ca505abc76bafa32448414335d874337283bd8fae2bd8a3d0c52da\\\": failed to find network info for sandbox \\\"0a9ea55e72ca505abc76bafa32448414335d874337283bd8fae2bd8a3d0c52da\\\"\"" pod="kube-system/coredns-7c65d6cfc9-vp22b" podUID="a6adb735-448b-480b-aba1-3ce4d56c6fc7"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.261111    1515 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\": failed to find network info for sandbox \"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\""
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.261194    1515 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\": failed to find network info for sandbox \"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\"" pod="kube-system/coredns-7c65d6cfc9-bb4db"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.261217    1515 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\": failed to find network info for sandbox \"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\"" pod="kube-system/coredns-7c65d6cfc9-bb4db"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: E0916 11:07:38.261295    1515 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-7c65d6cfc9-bb4db_kube-system(3fd53b00-28ef-44ef-8541-097ebc870b2f)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-7c65d6cfc9-bb4db_kube-system(3fd53b00-28ef-44ef-8541-097ebc870b2f)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\\\": failed to find network info for sandbox \\\"f3713326a4612464783141aa50e2577a08b3a2324e069578fd5f982014b422cf\\\"\"" pod="kube-system/coredns-7c65d6cfc9-bb4db" podUID="3fd53b00-28ef-44ef-8541-097ebc870b2f"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.906282    1515 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kindnet-dbrhk" podStartSLOduration=1.906263313 podStartE2EDuration="1.906263313s" podCreationTimestamp="2024-09-16 11:07:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:07:38.903272444 +0000 UTC m=+6.207702668" watchObservedRunningTime="2024-09-16 11:07:38.906263313 +0000 UTC m=+6.210693537"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.960520    1515 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-proxy-fm5qr" podStartSLOduration=1.960501425 podStartE2EDuration="1.960501425s" podCreationTimestamp="2024-09-16 11:07:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:07:38.923652778 +0000 UTC m=+6.228082994" watchObservedRunningTime="2024-09-16 11:07:38.960501425 +0000 UTC m=+6.264931641"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.969625    1515 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjcfc\" (UniqueName: \"kubernetes.io/projected/3fd53b00-28ef-44ef-8541-097ebc870b2f-kube-api-access-wjcfc\") pod \"3fd53b00-28ef-44ef-8541-097ebc870b2f\" (UID: \"3fd53b00-28ef-44ef-8541-097ebc870b2f\") "
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.969683    1515 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3fd53b00-28ef-44ef-8541-097ebc870b2f-config-volume\") pod \"3fd53b00-28ef-44ef-8541-097ebc870b2f\" (UID: \"3fd53b00-28ef-44ef-8541-097ebc870b2f\") "
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.969855    1515 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/97795413-5c7a-480b-9cbd-18d4dea5669b-tmp\") pod \"storage-provisioner\" (UID: \"97795413-5c7a-480b-9cbd-18d4dea5669b\") " pod="kube-system/storage-provisioner"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.969899    1515 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kv5x\" (UniqueName: \"kubernetes.io/projected/97795413-5c7a-480b-9cbd-18d4dea5669b-kube-api-access-5kv5x\") pod \"storage-provisioner\" (UID: \"97795413-5c7a-480b-9cbd-18d4dea5669b\") " pod="kube-system/storage-provisioner"
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.970645    1515 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3fd53b00-28ef-44ef-8541-097ebc870b2f-config-volume" (OuterVolumeSpecName: "config-volume") pod "3fd53b00-28ef-44ef-8541-097ebc870b2f" (UID: "3fd53b00-28ef-44ef-8541-097ebc870b2f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
	Sep 16 11:07:38 multinode-890146 kubelet[1515]: I0916 11:07:38.975577    1515 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fd53b00-28ef-44ef-8541-097ebc870b2f-kube-api-access-wjcfc" (OuterVolumeSpecName: "kube-api-access-wjcfc") pod "3fd53b00-28ef-44ef-8541-097ebc870b2f" (UID: "3fd53b00-28ef-44ef-8541-097ebc870b2f"). InnerVolumeSpecName "kube-api-access-wjcfc". PluginName "kubernetes.io/projected", VolumeGidValue ""
	Sep 16 11:07:39 multinode-890146 kubelet[1515]: I0916 11:07:39.071215    1515 reconciler_common.go:288] "Volume detached for volume \"kube-api-access-wjcfc\" (UniqueName: \"kubernetes.io/projected/3fd53b00-28ef-44ef-8541-097ebc870b2f-kube-api-access-wjcfc\") on node \"multinode-890146\" DevicePath \"\""
	Sep 16 11:07:39 multinode-890146 kubelet[1515]: I0916 11:07:39.071430    1515 reconciler_common.go:288] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3fd53b00-28ef-44ef-8541-097ebc870b2f-config-volume\") on node \"multinode-890146\" DevicePath \"\""
	Sep 16 11:07:40 multinode-890146 kubelet[1515]: I0916 11:07:40.627938    1515 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/storage-provisioner" podStartSLOduration=2.627915516 podStartE2EDuration="2.627915516s" podCreationTimestamp="2024-09-16 11:07:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:07:39.937944276 +0000 UTC m=+7.242374500" watchObservedRunningTime="2024-09-16 11:07:40.627915516 +0000 UTC m=+7.932345740"
	Sep 16 11:07:40 multinode-890146 kubelet[1515]: I0916 11:07:40.834319    1515 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fd53b00-28ef-44ef-8541-097ebc870b2f" path="/var/lib/kubelet/pods/3fd53b00-28ef-44ef-8541-097ebc870b2f/volumes"
	Sep 16 11:07:43 multinode-890146 kubelet[1515]: I0916 11:07:43.244514    1515 kuberuntime_manager.go:1635] "Updating runtime config through cri with podcidr" CIDR="10.244.0.0/24"
	Sep 16 11:07:43 multinode-890146 kubelet[1515]: I0916 11:07:43.245380    1515 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Sep 16 11:07:53 multinode-890146 kubelet[1515]: I0916 11:07:53.951905    1515 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/coredns-7c65d6cfc9-vp22b" podStartSLOduration=16.951884165 podStartE2EDuration="16.951884165s" podCreationTimestamp="2024-09-16 11:07:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2024-09-16 11:07:53.937091831 +0000 UTC m=+21.241522064" watchObservedRunningTime="2024-09-16 11:07:53.951884165 +0000 UTC m=+21.256314381"
	Sep 16 11:08:14 multinode-890146 kubelet[1515]: I0916 11:08:14.526967    1515 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4tvl\" (UniqueName: \"kubernetes.io/projected/8e7abaaa-be47-456f-9980-53cbfcd75f48-kube-api-access-r4tvl\") pod \"busybox-7dff88458-hf6zl\" (UID: \"8e7abaaa-be47-456f-9980-53cbfcd75f48\") " pod="default/busybox-7dff88458-hf6zl"
	Sep 16 11:08:16 multinode-890146 kubelet[1515]: I0916 11:08:16.984853    1515 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="default/busybox-7dff88458-hf6zl" podStartSLOduration=1.257437152 podStartE2EDuration="2.984813644s" podCreationTimestamp="2024-09-16 11:08:14 +0000 UTC" firstStartedPulling="2024-09-16 11:08:14.80585921 +0000 UTC m=+42.110289426" lastFinishedPulling="2024-09-16 11:08:16.533235702 +0000 UTC m=+43.837665918" observedRunningTime="2024-09-16 11:08:16.984406327 +0000 UTC m=+44.288836543" watchObservedRunningTime="2024-09-16 11:08:16.984813644 +0000 UTC m=+44.289243860"
	Sep 16 11:08:30 multinode-890146 kubelet[1515]: E0916 11:08:30.916723    1515 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 192.168.58.2:48600->192.168.58.2:10010: read tcp 192.168.58.2:48600->192.168.58.2:10010: read: connection reset by peer
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p multinode-890146 -n multinode-890146
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-890146 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context multinode-890146 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (623.776µs)
helpers_test.go:263: kubectl --context multinode-890146 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiNode/serial/StartAfterStop (12.03s)

                                                
                                    
x
+
TestMultiNode/serial/DeleteNode (9.28s)

                                                
                                                
=== RUN   TestMultiNode/serial/DeleteNode
multinode_test.go:416: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 node delete m03
multinode_test.go:416: (dbg) Done: out/minikube-linux-arm64 -p multinode-890146 node delete m03: (4.877410993s)
multinode_test.go:422: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 status --alsologtostderr
multinode_test.go:436: (dbg) Run:  kubectl get nodes
multinode_test.go:436: (dbg) Non-zero exit: kubectl get nodes: fork/exec /usr/local/bin/kubectl: exec format error (470.496µs)
multinode_test.go:438: failed to run kubectl get nodes. args "kubectl get nodes" : fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiNode/serial/DeleteNode]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect multinode-890146
helpers_test.go:235: (dbg) docker inspect multinode-890146:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb",
	        "Created": "2024-09-16T11:07:09.881207881Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2188725,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:09:44.12038564Z",
	            "FinishedAt": "2024-09-16T11:09:43.31148519Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/hostname",
	        "HostsPath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/hosts",
	        "LogPath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb-json.log",
	        "Name": "/multinode-890146",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "multinode-890146:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "multinode-890146",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7/merged",
	                "UpperDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7/diff",
	                "WorkDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "multinode-890146",
	                "Source": "/var/lib/docker/volumes/multinode-890146/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "multinode-890146",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "multinode-890146",
	                "name.minikube.sigs.k8s.io": "multinode-890146",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "d128fe26f1ac9d9df53f5cf51b24a3aec272cf253a98f7172138434c69dd4d24",
	            "SandboxKey": "/var/run/docker/netns/d128fe26f1ac",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40737"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40738"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40741"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40739"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40740"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "multinode-890146": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.58.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:3a:02",
	                    "DriverOpts": null,
	                    "NetworkID": "b138f637362d33b7ccebcd9c06d6cdaa35c434cdf582fc761f98e8246e8681cc",
	                    "EndpointID": "95b1acd051f5ddd401dbc614a09b010505b1d37f193c1772e88e4ddca57695dd",
	                    "Gateway": "192.168.58.1",
	                    "IPAddress": "192.168.58.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "multinode-890146",
	                        "d045dde36e30"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p multinode-890146 -n multinode-890146
helpers_test.go:244: <<< TestMultiNode/serial/DeleteNode FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/DeleteNode]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p multinode-890146 logs -n 25: (2.182190669s)
helpers_test.go:252: TestMultiNode/serial/DeleteNode logs: 
-- stdout --
	
	==> Audit <==
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	| Command |                                          Args                                           |     Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	| cp      | multinode-890146 cp multinode-890146-m02:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | /tmp/TestMultiNodeserialCopyFile3892048771/001/cp-test_multinode-890146-m02.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | multinode-890146-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m02:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | multinode-890146:/home/docker/cp-test_multinode-890146-m02_multinode-890146.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | multinode-890146-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n multinode-890146 sudo cat                                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | /home/docker/cp-test_multinode-890146-m02_multinode-890146.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m02:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03:/home/docker/cp-test_multinode-890146-m02_multinode-890146-m03.txt |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n multinode-890146-m03 sudo cat                                   | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /home/docker/cp-test_multinode-890146-m02_multinode-890146-m03.txt                      |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp testdata/cp-test.txt                                                | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03:/home/docker/cp-test.txt                                           |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m03:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /tmp/TestMultiNodeserialCopyFile3892048771/001/cp-test_multinode-890146-m03.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m03:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146:/home/docker/cp-test_multinode-890146-m03_multinode-890146.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n multinode-890146 sudo cat                                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /home/docker/cp-test_multinode-890146-m03_multinode-890146.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m03:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m02:/home/docker/cp-test_multinode-890146-m03_multinode-890146-m02.txt |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n multinode-890146-m02 sudo cat                                   | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /home/docker/cp-test_multinode-890146-m03_multinode-890146-m02.txt                      |                  |         |         |                     |                     |
	| node    | multinode-890146 node stop m03                                                          | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	| node    | multinode-890146 node start                                                             | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | m03 -v=7 --alsologtostderr                                                              |                  |         |         |                     |                     |
	| node    | list -p multinode-890146                                                                | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC |                     |
	| stop    | -p multinode-890146                                                                     | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	| start   | -p multinode-890146                                                                     | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:11 UTC |
	|         | --wait=true -v=8                                                                        |                  |         |         |                     |                     |
	|         | --alsologtostderr                                                                       |                  |         |         |                     |                     |
	| node    | list -p multinode-890146                                                                | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC |                     |
	| node    | multinode-890146 node delete                                                            | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | m03                                                                                     |                  |         |         |                     |                     |
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 11:09:43
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 11:09:43.645119 2188520 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:09:43.645316 2188520 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:09:43.645346 2188520 out.go:358] Setting ErrFile to fd 2...
	I0916 11:09:43.645367 2188520 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:09:43.645651 2188520 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 11:09:43.646055 2188520 out.go:352] Setting JSON to false
	I0916 11:09:43.647094 2188520 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":139926,"bootTime":1726345058,"procs":185,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 11:09:43.647207 2188520 start.go:139] virtualization:  
	I0916 11:09:43.650041 2188520 out.go:177] * [multinode-890146] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:09:43.652835 2188520 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:09:43.652969 2188520 notify.go:220] Checking for updates...
	I0916 11:09:43.657161 2188520 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:09:43.659313 2188520 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:09:43.661308 2188520 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 11:09:43.663092 2188520 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:09:43.665101 2188520 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 11:09:43.667286 2188520 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:09:43.667382 2188520 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:09:43.688545 2188520 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:09:43.688668 2188520 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:09:43.759707 2188520 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:5 ContainersRunning:2 ContainersPaused:0 ContainersStopped:3 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:54 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 11:09:43.749464115 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:09:43.759822 2188520 docker.go:318] overlay module found
	I0916 11:09:43.762322 2188520 out.go:177] * Using the docker driver based on existing profile
	I0916 11:09:43.764270 2188520 start.go:297] selected driver: docker
	I0916 11:09:43.764288 2188520 start.go:901] validating driver "docker" against &{Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName
:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true} {Name:m03 IP:192.168.58.4 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime: ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false
kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: Socke
tVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:09:43.764453 2188520 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:09:43.764550 2188520 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:09:43.814981 2188520 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:5 ContainersRunning:2 ContainersPaused:0 ContainersStopped:3 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:54 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 11:09:43.805788648 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:09:43.815428 2188520 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:09:43.815462 2188520 cni.go:84] Creating CNI manager for ""
	I0916 11:09:43.815523 2188520 cni.go:136] multinode detected (3 nodes found), recommending kindnet
	I0916 11:09:43.815582 2188520 start.go:340] cluster config:
	{Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true} {Name:m03 IP:192.168.58.4 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-s
erver:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:
0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:09:43.818050 2188520 out.go:177] * Starting "multinode-890146" primary control-plane node in "multinode-890146" cluster
	I0916 11:09:43.820124 2188520 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 11:09:43.822123 2188520 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:09:43.824010 2188520 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:09:43.824074 2188520 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 11:09:43.824087 2188520 cache.go:56] Caching tarball of preloaded images
	I0916 11:09:43.824096 2188520 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:09:43.824171 2188520 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 11:09:43.824181 2188520 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 11:09:43.824342 2188520 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	W0916 11:09:43.849169 2188520 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:09:43.849188 2188520 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:09:43.849263 2188520 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:09:43.849281 2188520 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:09:43.849286 2188520 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:09:43.849294 2188520 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:09:43.849299 2188520 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:09:43.850820 2188520 image.go:273] response: 
	I0916 11:09:43.963220 2188520 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:09:43.963259 2188520 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:09:43.963290 2188520 start.go:360] acquireMachinesLock for multinode-890146: {Name:mk50282545d8a591b3d758c5d48e2059a356819d Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:09:43.963358 2188520 start.go:364] duration metric: took 42.642µs to acquireMachinesLock for "multinode-890146"
	I0916 11:09:43.963404 2188520 start.go:96] Skipping create...Using existing machine configuration
	I0916 11:09:43.963415 2188520 fix.go:54] fixHost starting: 
	I0916 11:09:43.963707 2188520 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:09:43.989484 2188520 fix.go:112] recreateIfNeeded on multinode-890146: state=Stopped err=<nil>
	W0916 11:09:43.989526 2188520 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 11:09:43.992156 2188520 out.go:177] * Restarting existing docker container for "multinode-890146" ...
	I0916 11:09:43.994222 2188520 cli_runner.go:164] Run: docker start multinode-890146
	I0916 11:09:44.300718 2188520 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:09:44.323984 2188520 kic.go:430] container "multinode-890146" state is running.
	I0916 11:09:44.324380 2188520 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146
	I0916 11:09:44.351238 2188520 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:09:44.351466 2188520 machine.go:93] provisionDockerMachine start ...
	I0916 11:09:44.351525 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:09:44.374182 2188520 main.go:141] libmachine: Using SSH client type: native
	I0916 11:09:44.374448 2188520 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40737 <nil> <nil>}
	I0916 11:09:44.374458 2188520 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:09:44.375148 2188520 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 11:09:47.514231 2188520 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146
	
	I0916 11:09:47.514255 2188520 ubuntu.go:169] provisioning hostname "multinode-890146"
	I0916 11:09:47.514317 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:09:47.531563 2188520 main.go:141] libmachine: Using SSH client type: native
	I0916 11:09:47.531824 2188520 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40737 <nil> <nil>}
	I0916 11:09:47.531841 2188520 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-890146 && echo "multinode-890146" | sudo tee /etc/hostname
	I0916 11:09:47.678636 2188520 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146
	
	I0916 11:09:47.678794 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:09:47.695794 2188520 main.go:141] libmachine: Using SSH client type: native
	I0916 11:09:47.696056 2188520 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40737 <nil> <nil>}
	I0916 11:09:47.696079 2188520 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-890146' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-890146/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-890146' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:09:47.834783 2188520 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:09:47.834811 2188520 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 11:09:47.834834 2188520 ubuntu.go:177] setting up certificates
	I0916 11:09:47.834844 2188520 provision.go:84] configureAuth start
	I0916 11:09:47.834915 2188520 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146
	I0916 11:09:47.854551 2188520 provision.go:143] copyHostCerts
	I0916 11:09:47.854598 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:09:47.854631 2188520 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 11:09:47.854642 2188520 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:09:47.854755 2188520 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 11:09:47.854846 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:09:47.854868 2188520 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 11:09:47.854873 2188520 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:09:47.854907 2188520 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 11:09:47.854954 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:09:47.854976 2188520 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 11:09:47.854983 2188520 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:09:47.855010 2188520 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 11:09:47.855063 2188520 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.multinode-890146 san=[127.0.0.1 192.168.58.2 localhost minikube multinode-890146]
	I0916 11:09:48.709766 2188520 provision.go:177] copyRemoteCerts
	I0916 11:09:48.709843 2188520 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:09:48.709887 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:09:48.726265 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40737 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:09:48.824965 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:09:48.825035 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 11:09:48.849319 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:09:48.849386 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1216 bytes)
	I0916 11:09:48.873632 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:09:48.873694 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 11:09:48.898396 2188520 provision.go:87] duration metric: took 1.063537558s to configureAuth
	I0916 11:09:48.898422 2188520 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:09:48.898660 2188520 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:09:48.898668 2188520 machine.go:96] duration metric: took 4.547195088s to provisionDockerMachine
	I0916 11:09:48.898729 2188520 start.go:293] postStartSetup for "multinode-890146" (driver="docker")
	I0916 11:09:48.898743 2188520 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:09:48.898802 2188520 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:09:48.898849 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:09:48.915124 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40737 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:09:49.014269 2188520 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:09:49.017538 2188520 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:09:49.017558 2188520 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:09:49.017565 2188520 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:09:49.017570 2188520 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:09:49.017575 2188520 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:09:49.017579 2188520 command_runner.go:130] > ID=ubuntu
	I0916 11:09:49.017582 2188520 command_runner.go:130] > ID_LIKE=debian
	I0916 11:09:49.017586 2188520 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:09:49.017591 2188520 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:09:49.017597 2188520 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:09:49.017605 2188520 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:09:49.017609 2188520 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:09:49.017659 2188520 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:09:49.017687 2188520 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:09:49.017705 2188520 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:09:49.017713 2188520 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:09:49.017727 2188520 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 11:09:49.017787 2188520 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 11:09:49.017874 2188520 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 11:09:49.017887 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 11:09:49.017989 2188520 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:09:49.026382 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:09:49.050761 2188520 start.go:296] duration metric: took 152.012697ms for postStartSetup
	I0916 11:09:49.050844 2188520 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:09:49.050895 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:09:49.067981 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40737 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:09:49.163500 2188520 command_runner.go:130] > 22%
	I0916 11:09:49.163573 2188520 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:09:49.169245 2188520 command_runner.go:130] > 153G
	I0916 11:09:49.169279 2188520 fix.go:56] duration metric: took 5.205862364s for fixHost
	I0916 11:09:49.169290 2188520 start.go:83] releasing machines lock for "multinode-890146", held for 5.205918906s
	I0916 11:09:49.169358 2188520 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146
	I0916 11:09:49.185979 2188520 ssh_runner.go:195] Run: cat /version.json
	I0916 11:09:49.186040 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:09:49.186291 2188520 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:09:49.186359 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:09:49.205000 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40737 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:09:49.212287 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40737 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:09:49.298092 2188520 command_runner.go:130] > {"iso_version": "v1.34.0-1726281733-19643", "kicbase_version": "v0.0.45-1726358845-19644", "minikube_version": "v1.34.0", "commit": "f890713149c79cf50e25c13e6a5c0470aa0f0450"}
	I0916 11:09:49.298264 2188520 ssh_runner.go:195] Run: systemctl --version
	I0916 11:09:49.427766 2188520 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:09:49.431069 2188520 command_runner.go:130] > systemd 249 (249.11-0ubuntu3.12)
	I0916 11:09:49.431129 2188520 command_runner.go:130] > +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified
	I0916 11:09:49.431200 2188520 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:09:49.435257 2188520 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 11:09:49.435282 2188520 command_runner.go:130] >   Size: 78        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:09:49.435289 2188520 command_runner.go:130] > Device: 3ch/60d	Inode: 1324613     Links: 1
	I0916 11:09:49.435296 2188520 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:09:49.435302 2188520 command_runner.go:130] > Access: 2024-09-16 11:07:13.844365694 +0000
	I0916 11:09:49.435308 2188520 command_runner.go:130] > Modify: 2024-09-16 11:07:13.816365837 +0000
	I0916 11:09:49.435318 2188520 command_runner.go:130] > Change: 2024-09-16 11:07:13.816365837 +0000
	I0916 11:09:49.435323 2188520 command_runner.go:130] >  Birth: 2024-09-16 11:07:13.816365837 +0000
	I0916 11:09:49.435712 2188520 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 11:09:49.453663 2188520 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:09:49.453744 2188520 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:09:49.462456 2188520 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 11:09:49.462480 2188520 start.go:495] detecting cgroup driver to use...
	I0916 11:09:49.462514 2188520 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:09:49.462572 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 11:09:49.476947 2188520 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 11:09:49.488937 2188520 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:09:49.489042 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:09:49.502232 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:09:49.514514 2188520 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:09:49.609148 2188520 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:09:49.690505 2188520 docker.go:233] disabling docker service ...
	I0916 11:09:49.690632 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:09:49.703701 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:09:49.715932 2188520 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:09:49.819031 2188520 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:09:49.912327 2188520 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:09:49.924002 2188520 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:09:49.939667 2188520 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0916 11:09:49.941188 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 11:09:49.951473 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 11:09:49.961771 2188520 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 11:09:49.961897 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 11:09:49.972682 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:09:49.982937 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 11:09:49.993812 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:09:50.008542 2188520 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:09:50.024763 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 11:09:50.036601 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 11:09:50.048057 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 11:09:50.058939 2188520 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:09:50.066899 2188520 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:09:50.068003 2188520 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:09:50.076788 2188520 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:09:50.168115 2188520 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 11:09:50.315629 2188520 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 11:09:50.315733 2188520 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 11:09:50.319582 2188520 command_runner.go:130] >   File: /run/containerd/containerd.sock
	I0916 11:09:50.319607 2188520 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:09:50.319615 2188520 command_runner.go:130] > Device: 45h/69d	Inode: 160         Links: 1
	I0916 11:09:50.319622 2188520 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:09:50.319648 2188520 command_runner.go:130] > Access: 2024-09-16 11:09:50.243549151 +0000
	I0916 11:09:50.319656 2188520 command_runner.go:130] > Modify: 2024-09-16 11:09:50.243549151 +0000
	I0916 11:09:50.319667 2188520 command_runner.go:130] > Change: 2024-09-16 11:09:50.243549151 +0000
	I0916 11:09:50.319670 2188520 command_runner.go:130] >  Birth: -
	I0916 11:09:50.319919 2188520 start.go:563] Will wait 60s for crictl version
	I0916 11:09:50.320000 2188520 ssh_runner.go:195] Run: which crictl
	I0916 11:09:50.323254 2188520 command_runner.go:130] > /usr/bin/crictl
	I0916 11:09:50.323873 2188520 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:09:50.363201 2188520 command_runner.go:130] > Version:  0.1.0
	I0916 11:09:50.363463 2188520 command_runner.go:130] > RuntimeName:  containerd
	I0916 11:09:50.363642 2188520 command_runner.go:130] > RuntimeVersion:  1.7.22
	I0916 11:09:50.363824 2188520 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:09:50.369338 2188520 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 11:09:50.369441 2188520 ssh_runner.go:195] Run: containerd --version
	I0916 11:09:50.389949 2188520 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:09:50.391833 2188520 ssh_runner.go:195] Run: containerd --version
	I0916 11:09:50.416089 2188520 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:09:50.420861 2188520 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 11:09:50.423136 2188520 cli_runner.go:164] Run: docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:09:50.437535 2188520 ssh_runner.go:195] Run: grep 192.168.58.1	host.minikube.internal$ /etc/hosts
	I0916 11:09:50.441273 2188520 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.58.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:09:50.452128 2188520 kubeadm.go:883] updating cluster {Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APISe
rverNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true} {Name:m03 IP:192.168.58.4 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubefl
ow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetC
lientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:09:50.452293 2188520 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:09:50.452352 2188520 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:09:50.487271 2188520 command_runner.go:130] > {
	I0916 11:09:50.487297 2188520 command_runner.go:130] >   "images": [
	I0916 11:09:50.487302 2188520 command_runner.go:130] >     {
	I0916 11:09:50.487311 2188520 command_runner.go:130] >       "id": "sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:09:50.487317 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.487322 2188520 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:09:50.487325 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487329 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.487338 2188520 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:09:50.487344 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487349 2188520 command_runner.go:130] >       "size": "33309097",
	I0916 11:09:50.487352 2188520 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.487356 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.487365 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.487369 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.487373 2188520 command_runner.go:130] >     },
	I0916 11:09:50.487381 2188520 command_runner.go:130] >     {
	I0916 11:09:50.487390 2188520 command_runner.go:130] >       "id": "sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd",
	I0916 11:09:50.487397 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.487402 2188520 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox:1.28"
	I0916 11:09:50.487408 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487413 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.487426 2188520 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12"
	I0916 11:09:50.487429 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487435 2188520 command_runner.go:130] >       "size": "764554",
	I0916 11:09:50.487438 2188520 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.487446 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.487450 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.487457 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.487462 2188520 command_runner.go:130] >     },
	I0916 11:09:50.487469 2188520 command_runner.go:130] >     {
	I0916 11:09:50.487477 2188520 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:09:50.487484 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.487491 2188520 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:09:50.487497 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487502 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.487511 2188520 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:09:50.487519 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487524 2188520 command_runner.go:130] >       "size": "8034419",
	I0916 11:09:50.487527 2188520 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.487531 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.487540 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.487548 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.487551 2188520 command_runner.go:130] >     },
	I0916 11:09:50.487554 2188520 command_runner.go:130] >     {
	I0916 11:09:50.487566 2188520 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:09:50.487573 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.487579 2188520 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:09:50.487585 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487589 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.487597 2188520 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:09:50.487601 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487605 2188520 command_runner.go:130] >       "size": "16948420",
	I0916 11:09:50.487611 2188520 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.487615 2188520 command_runner.go:130] >       "username": "nonroot",
	I0916 11:09:50.487622 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.487626 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.487629 2188520 command_runner.go:130] >     },
	I0916 11:09:50.487632 2188520 command_runner.go:130] >     {
	I0916 11:09:50.487639 2188520 command_runner.go:130] >       "id": "sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:09:50.487645 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.487650 2188520 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:09:50.487657 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487662 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.487674 2188520 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a"
	I0916 11:09:50.487680 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487684 2188520 command_runner.go:130] >       "size": "66535646",
	I0916 11:09:50.487687 2188520 command_runner.go:130] >       "uid": {
	I0916 11:09:50.487694 2188520 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.487698 2188520 command_runner.go:130] >       },
	I0916 11:09:50.487706 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.487710 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.487718 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.487722 2188520 command_runner.go:130] >     },
	I0916 11:09:50.487730 2188520 command_runner.go:130] >     {
	I0916 11:09:50.487737 2188520 command_runner.go:130] >       "id": "sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:09:50.487744 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.487749 2188520 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:09:50.487752 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487756 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.487766 2188520 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb"
	I0916 11:09:50.487770 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487777 2188520 command_runner.go:130] >       "size": "25687130",
	I0916 11:09:50.487780 2188520 command_runner.go:130] >       "uid": {
	I0916 11:09:50.487788 2188520 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.487792 2188520 command_runner.go:130] >       },
	I0916 11:09:50.487796 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.487803 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.487808 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.487814 2188520 command_runner.go:130] >     },
	I0916 11:09:50.487818 2188520 command_runner.go:130] >     {
	I0916 11:09:50.487829 2188520 command_runner.go:130] >       "id": "sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:09:50.487837 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.487842 2188520 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:09:50.487845 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487850 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.487861 2188520 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1"
	I0916 11:09:50.487867 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487871 2188520 command_runner.go:130] >       "size": "23948670",
	I0916 11:09:50.487875 2188520 command_runner.go:130] >       "uid": {
	I0916 11:09:50.487883 2188520 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.487886 2188520 command_runner.go:130] >       },
	I0916 11:09:50.487894 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.487898 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.487905 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.487909 2188520 command_runner.go:130] >     },
	I0916 11:09:50.487916 2188520 command_runner.go:130] >     {
	I0916 11:09:50.487923 2188520 command_runner.go:130] >       "id": "sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:09:50.487927 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.487932 2188520 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:09:50.487940 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487943 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.487951 2188520 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44"
	I0916 11:09:50.487958 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.487962 2188520 command_runner.go:130] >       "size": "26756812",
	I0916 11:09:50.487969 2188520 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.487973 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.487980 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.487986 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.487992 2188520 command_runner.go:130] >     },
	I0916 11:09:50.487995 2188520 command_runner.go:130] >     {
	I0916 11:09:50.488002 2188520 command_runner.go:130] >       "id": "sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:09:50.488008 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.488013 2188520 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:09:50.488018 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.488022 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.488034 2188520 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:09:50.488038 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.488047 2188520 command_runner.go:130] >       "size": "18507674",
	I0916 11:09:50.488051 2188520 command_runner.go:130] >       "uid": {
	I0916 11:09:50.488059 2188520 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.488062 2188520 command_runner.go:130] >       },
	I0916 11:09:50.488070 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.488074 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.488081 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.488084 2188520 command_runner.go:130] >     },
	I0916 11:09:50.488087 2188520 command_runner.go:130] >     {
	I0916 11:09:50.488099 2188520 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:09:50.488106 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.488110 2188520 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:09:50.488114 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.488118 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.488130 2188520 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:09:50.488136 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.488141 2188520 command_runner.go:130] >       "size": "267933",
	I0916 11:09:50.488158 2188520 command_runner.go:130] >       "uid": {
	I0916 11:09:50.488164 2188520 command_runner.go:130] >         "value": "65535"
	I0916 11:09:50.488167 2188520 command_runner.go:130] >       },
	I0916 11:09:50.488171 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.488174 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.488181 2188520 command_runner.go:130] >       "pinned": true
	I0916 11:09:50.488184 2188520 command_runner.go:130] >     }
	I0916 11:09:50.488188 2188520 command_runner.go:130] >   ]
	I0916 11:09:50.488196 2188520 command_runner.go:130] > }
	I0916 11:09:50.490772 2188520 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 11:09:50.490794 2188520 containerd.go:534] Images already preloaded, skipping extraction
	I0916 11:09:50.490859 2188520 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:09:50.523851 2188520 command_runner.go:130] > {
	I0916 11:09:50.523891 2188520 command_runner.go:130] >   "images": [
	I0916 11:09:50.523897 2188520 command_runner.go:130] >     {
	I0916 11:09:50.523907 2188520 command_runner.go:130] >       "id": "sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:09:50.523913 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.523918 2188520 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:09:50.523922 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.523933 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.523946 2188520 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:09:50.523950 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.523955 2188520 command_runner.go:130] >       "size": "33309097",
	I0916 11:09:50.523960 2188520 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.523967 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.523972 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.523976 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.523985 2188520 command_runner.go:130] >     },
	I0916 11:09:50.523988 2188520 command_runner.go:130] >     {
	I0916 11:09:50.523998 2188520 command_runner.go:130] >       "id": "sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd",
	I0916 11:09:50.524006 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.524011 2188520 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox:1.28"
	I0916 11:09:50.524014 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524018 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.524027 2188520 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12"
	I0916 11:09:50.524033 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524038 2188520 command_runner.go:130] >       "size": "764554",
	I0916 11:09:50.524045 2188520 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.524049 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.524054 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.524062 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.524065 2188520 command_runner.go:130] >     },
	I0916 11:09:50.524073 2188520 command_runner.go:130] >     {
	I0916 11:09:50.524080 2188520 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:09:50.524088 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.524094 2188520 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:09:50.524097 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524102 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.524110 2188520 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:09:50.524126 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524134 2188520 command_runner.go:130] >       "size": "8034419",
	I0916 11:09:50.524138 2188520 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.524149 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.524156 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.524161 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.524168 2188520 command_runner.go:130] >     },
	I0916 11:09:50.524171 2188520 command_runner.go:130] >     {
	I0916 11:09:50.524178 2188520 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:09:50.524182 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.524187 2188520 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:09:50.524195 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524199 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.524207 2188520 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:09:50.524213 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524217 2188520 command_runner.go:130] >       "size": "16948420",
	I0916 11:09:50.524225 2188520 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.524229 2188520 command_runner.go:130] >       "username": "nonroot",
	I0916 11:09:50.524236 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.524240 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.524247 2188520 command_runner.go:130] >     },
	I0916 11:09:50.524250 2188520 command_runner.go:130] >     {
	I0916 11:09:50.524257 2188520 command_runner.go:130] >       "id": "sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:09:50.524261 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.524265 2188520 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:09:50.524271 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524275 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.524286 2188520 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a"
	I0916 11:09:50.524293 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524300 2188520 command_runner.go:130] >       "size": "66535646",
	I0916 11:09:50.524304 2188520 command_runner.go:130] >       "uid": {
	I0916 11:09:50.524311 2188520 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.524315 2188520 command_runner.go:130] >       },
	I0916 11:09:50.524322 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.524326 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.524332 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.524338 2188520 command_runner.go:130] >     },
	I0916 11:09:50.524341 2188520 command_runner.go:130] >     {
	I0916 11:09:50.524348 2188520 command_runner.go:130] >       "id": "sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:09:50.524356 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.524361 2188520 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:09:50.524365 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524368 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.524376 2188520 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb"
	I0916 11:09:50.524379 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524384 2188520 command_runner.go:130] >       "size": "25687130",
	I0916 11:09:50.524388 2188520 command_runner.go:130] >       "uid": {
	I0916 11:09:50.524392 2188520 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.524396 2188520 command_runner.go:130] >       },
	I0916 11:09:50.524400 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.524404 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.524408 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.524410 2188520 command_runner.go:130] >     },
	I0916 11:09:50.524413 2188520 command_runner.go:130] >     {
	I0916 11:09:50.524422 2188520 command_runner.go:130] >       "id": "sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:09:50.524426 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.524431 2188520 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:09:50.524435 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524439 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.524452 2188520 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1"
	I0916 11:09:50.524456 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524462 2188520 command_runner.go:130] >       "size": "23948670",
	I0916 11:09:50.524466 2188520 command_runner.go:130] >       "uid": {
	I0916 11:09:50.524470 2188520 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.524478 2188520 command_runner.go:130] >       },
	I0916 11:09:50.524482 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.524486 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.524494 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.524498 2188520 command_runner.go:130] >     },
	I0916 11:09:50.524502 2188520 command_runner.go:130] >     {
	I0916 11:09:50.524513 2188520 command_runner.go:130] >       "id": "sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:09:50.524521 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.524525 2188520 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:09:50.524529 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524532 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.524540 2188520 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44"
	I0916 11:09:50.524547 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524550 2188520 command_runner.go:130] >       "size": "26756812",
	I0916 11:09:50.524554 2188520 command_runner.go:130] >       "uid": null,
	I0916 11:09:50.524562 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.524566 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.524574 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.524577 2188520 command_runner.go:130] >     },
	I0916 11:09:50.524585 2188520 command_runner.go:130] >     {
	I0916 11:09:50.524591 2188520 command_runner.go:130] >       "id": "sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:09:50.524598 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.524603 2188520 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:09:50.524607 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524613 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.524621 2188520 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:09:50.524626 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524631 2188520 command_runner.go:130] >       "size": "18507674",
	I0916 11:09:50.524637 2188520 command_runner.go:130] >       "uid": {
	I0916 11:09:50.524642 2188520 command_runner.go:130] >         "value": "0"
	I0916 11:09:50.524645 2188520 command_runner.go:130] >       },
	I0916 11:09:50.524653 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.524657 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.524667 2188520 command_runner.go:130] >       "pinned": false
	I0916 11:09:50.524675 2188520 command_runner.go:130] >     },
	I0916 11:09:50.524679 2188520 command_runner.go:130] >     {
	I0916 11:09:50.524691 2188520 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:09:50.524697 2188520 command_runner.go:130] >       "repoTags": [
	I0916 11:09:50.524704 2188520 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:09:50.524712 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524716 2188520 command_runner.go:130] >       "repoDigests": [
	I0916 11:09:50.524724 2188520 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:09:50.524730 2188520 command_runner.go:130] >       ],
	I0916 11:09:50.524734 2188520 command_runner.go:130] >       "size": "267933",
	I0916 11:09:50.524738 2188520 command_runner.go:130] >       "uid": {
	I0916 11:09:50.524746 2188520 command_runner.go:130] >         "value": "65535"
	I0916 11:09:50.524751 2188520 command_runner.go:130] >       },
	I0916 11:09:50.524758 2188520 command_runner.go:130] >       "username": "",
	I0916 11:09:50.524763 2188520 command_runner.go:130] >       "spec": null,
	I0916 11:09:50.524772 2188520 command_runner.go:130] >       "pinned": true
	I0916 11:09:50.524775 2188520 command_runner.go:130] >     }
	I0916 11:09:50.524778 2188520 command_runner.go:130] >   ]
	I0916 11:09:50.524781 2188520 command_runner.go:130] > }
	I0916 11:09:50.527421 2188520 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 11:09:50.527443 2188520 cache_images.go:84] Images are preloaded, skipping loading
	I0916 11:09:50.527451 2188520 kubeadm.go:934] updating node { 192.168.58.2 8443 v1.31.1 containerd true true} ...
	I0916 11:09:50.527560 2188520 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-890146 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.58.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:09:50.527632 2188520 ssh_runner.go:195] Run: sudo crictl info
	I0916 11:09:50.564008 2188520 command_runner.go:130] > {
	I0916 11:09:50.564030 2188520 command_runner.go:130] >   "status": {
	I0916 11:09:50.564036 2188520 command_runner.go:130] >     "conditions": [
	I0916 11:09:50.564040 2188520 command_runner.go:130] >       {
	I0916 11:09:50.564046 2188520 command_runner.go:130] >         "type": "RuntimeReady",
	I0916 11:09:50.564050 2188520 command_runner.go:130] >         "status": true,
	I0916 11:09:50.564054 2188520 command_runner.go:130] >         "reason": "",
	I0916 11:09:50.564064 2188520 command_runner.go:130] >         "message": ""
	I0916 11:09:50.564067 2188520 command_runner.go:130] >       },
	I0916 11:09:50.564072 2188520 command_runner.go:130] >       {
	I0916 11:09:50.564078 2188520 command_runner.go:130] >         "type": "NetworkReady",
	I0916 11:09:50.564083 2188520 command_runner.go:130] >         "status": true,
	I0916 11:09:50.564095 2188520 command_runner.go:130] >         "reason": "",
	I0916 11:09:50.564099 2188520 command_runner.go:130] >         "message": ""
	I0916 11:09:50.564102 2188520 command_runner.go:130] >       },
	I0916 11:09:50.564106 2188520 command_runner.go:130] >       {
	I0916 11:09:50.564111 2188520 command_runner.go:130] >         "type": "ContainerdHasNoDeprecationWarnings",
	I0916 11:09:50.564116 2188520 command_runner.go:130] >         "status": true,
	I0916 11:09:50.564125 2188520 command_runner.go:130] >         "reason": "",
	I0916 11:09:50.564130 2188520 command_runner.go:130] >         "message": ""
	I0916 11:09:50.564137 2188520 command_runner.go:130] >       }
	I0916 11:09:50.564140 2188520 command_runner.go:130] >     ]
	I0916 11:09:50.564143 2188520 command_runner.go:130] >   },
	I0916 11:09:50.564146 2188520 command_runner.go:130] >   "cniconfig": {
	I0916 11:09:50.564151 2188520 command_runner.go:130] >     "PluginDirs": [
	I0916 11:09:50.564156 2188520 command_runner.go:130] >       "/opt/cni/bin"
	I0916 11:09:50.564160 2188520 command_runner.go:130] >     ],
	I0916 11:09:50.564174 2188520 command_runner.go:130] >     "PluginConfDir": "/etc/cni/net.d",
	I0916 11:09:50.564178 2188520 command_runner.go:130] >     "PluginMaxConfNum": 1,
	I0916 11:09:50.564182 2188520 command_runner.go:130] >     "Prefix": "eth",
	I0916 11:09:50.564190 2188520 command_runner.go:130] >     "Networks": [
	I0916 11:09:50.564194 2188520 command_runner.go:130] >       {
	I0916 11:09:50.564198 2188520 command_runner.go:130] >         "Config": {
	I0916 11:09:50.564206 2188520 command_runner.go:130] >           "Name": "cni-loopback",
	I0916 11:09:50.564211 2188520 command_runner.go:130] >           "CNIVersion": "0.3.1",
	I0916 11:09:50.564221 2188520 command_runner.go:130] >           "Plugins": [
	I0916 11:09:50.564224 2188520 command_runner.go:130] >             {
	I0916 11:09:50.564228 2188520 command_runner.go:130] >               "Network": {
	I0916 11:09:50.564233 2188520 command_runner.go:130] >                 "type": "loopback",
	I0916 11:09:50.564239 2188520 command_runner.go:130] >                 "ipam": {},
	I0916 11:09:50.564244 2188520 command_runner.go:130] >                 "dns": {}
	I0916 11:09:50.564251 2188520 command_runner.go:130] >               },
	I0916 11:09:50.564255 2188520 command_runner.go:130] >               "Source": "{\"type\":\"loopback\"}"
	I0916 11:09:50.564259 2188520 command_runner.go:130] >             }
	I0916 11:09:50.564263 2188520 command_runner.go:130] >           ],
	I0916 11:09:50.564277 2188520 command_runner.go:130] >           "Source": "{\n\"cniVersion\": \"0.3.1\",\n\"name\": \"cni-loopback\",\n\"plugins\": [{\n  \"type\": \"loopback\"\n}]\n}"
	I0916 11:09:50.564285 2188520 command_runner.go:130] >         },
	I0916 11:09:50.564289 2188520 command_runner.go:130] >         "IFName": "lo"
	I0916 11:09:50.564292 2188520 command_runner.go:130] >       },
	I0916 11:09:50.564295 2188520 command_runner.go:130] >       {
	I0916 11:09:50.564301 2188520 command_runner.go:130] >         "Config": {
	I0916 11:09:50.564305 2188520 command_runner.go:130] >           "Name": "kindnet",
	I0916 11:09:50.564312 2188520 command_runner.go:130] >           "CNIVersion": "0.3.1",
	I0916 11:09:50.564316 2188520 command_runner.go:130] >           "Plugins": [
	I0916 11:09:50.564320 2188520 command_runner.go:130] >             {
	I0916 11:09:50.564324 2188520 command_runner.go:130] >               "Network": {
	I0916 11:09:50.564331 2188520 command_runner.go:130] >                 "type": "ptp",
	I0916 11:09:50.564335 2188520 command_runner.go:130] >                 "ipam": {
	I0916 11:09:50.564342 2188520 command_runner.go:130] >                   "type": "host-local"
	I0916 11:09:50.564346 2188520 command_runner.go:130] >                 },
	I0916 11:09:50.564349 2188520 command_runner.go:130] >                 "dns": {}
	I0916 11:09:50.564353 2188520 command_runner.go:130] >               },
	I0916 11:09:50.564367 2188520 command_runner.go:130] >               "Source": "{\"ipMasq\":false,\"ipam\":{\"dataDir\":\"/run/cni-ipam-state\",\"ranges\":[[{\"subnet\":\"10.244.0.0/24\"}]],\"routes\":[{\"dst\":\"0.0.0.0/0\"}],\"type\":\"host-local\"},\"mtu\":1500,\"type\":\"ptp\"}"
	I0916 11:09:50.564374 2188520 command_runner.go:130] >             },
	I0916 11:09:50.564379 2188520 command_runner.go:130] >             {
	I0916 11:09:50.564383 2188520 command_runner.go:130] >               "Network": {
	I0916 11:09:50.564394 2188520 command_runner.go:130] >                 "type": "portmap",
	I0916 11:09:50.564405 2188520 command_runner.go:130] >                 "capabilities": {
	I0916 11:09:50.564414 2188520 command_runner.go:130] >                   "portMappings": true
	I0916 11:09:50.564418 2188520 command_runner.go:130] >                 },
	I0916 11:09:50.564422 2188520 command_runner.go:130] >                 "ipam": {},
	I0916 11:09:50.564426 2188520 command_runner.go:130] >                 "dns": {}
	I0916 11:09:50.564434 2188520 command_runner.go:130] >               },
	I0916 11:09:50.564442 2188520 command_runner.go:130] >               "Source": "{\"capabilities\":{\"portMappings\":true},\"type\":\"portmap\"}"
	I0916 11:09:50.564448 2188520 command_runner.go:130] >             }
	I0916 11:09:50.564451 2188520 command_runner.go:130] >           ],
	I0916 11:09:50.564484 2188520 command_runner.go:130] >           "Source": "\n{\n\t\"cniVersion\": \"0.3.1\",\n\t\"name\": \"kindnet\",\n\t\"plugins\": [\n\t{\n\t\t\"type\": \"ptp\",\n\t\t\"ipMasq\": false,\n\t\t\"ipam\": {\n\t\t\t\"type\": \"host-local\",\n\t\t\t\"dataDir\": \"/run/cni-ipam-state\",\n\t\t\t\"routes\": [\n\t\t\t\t\n\t\t\t\t\n\t\t\t\t{ \"dst\": \"0.0.0.0/0\" }\n\t\t\t],\n\t\t\t\"ranges\": [\n\t\t\t\t\n\t\t\t\t\n\t\t\t\t[ { \"subnet\": \"10.244.0.0/24\" } ]\n\t\t\t]\n\t\t}\n\t\t,\n\t\t\"mtu\": 1500\n\t\t\n\t},\n\t{\n\t\t\"type\": \"portmap\",\n\t\t\"capabilities\": {\n\t\t\t\"portMappings\": true\n\t\t}\n\t}\n\t]\n}\n"
	I0916 11:09:50.564492 2188520 command_runner.go:130] >         },
	I0916 11:09:50.564497 2188520 command_runner.go:130] >         "IFName": "eth0"
	I0916 11:09:50.564501 2188520 command_runner.go:130] >       }
	I0916 11:09:50.564505 2188520 command_runner.go:130] >     ]
	I0916 11:09:50.564507 2188520 command_runner.go:130] >   },
	I0916 11:09:50.564511 2188520 command_runner.go:130] >   "config": {
	I0916 11:09:50.564515 2188520 command_runner.go:130] >     "containerd": {
	I0916 11:09:50.564523 2188520 command_runner.go:130] >       "snapshotter": "overlayfs",
	I0916 11:09:50.564528 2188520 command_runner.go:130] >       "defaultRuntimeName": "runc",
	I0916 11:09:50.564535 2188520 command_runner.go:130] >       "defaultRuntime": {
	I0916 11:09:50.564539 2188520 command_runner.go:130] >         "runtimeType": "",
	I0916 11:09:50.564542 2188520 command_runner.go:130] >         "runtimePath": "",
	I0916 11:09:50.564547 2188520 command_runner.go:130] >         "runtimeEngine": "",
	I0916 11:09:50.564554 2188520 command_runner.go:130] >         "PodAnnotations": null,
	I0916 11:09:50.564558 2188520 command_runner.go:130] >         "ContainerAnnotations": null,
	I0916 11:09:50.564563 2188520 command_runner.go:130] >         "runtimeRoot": "",
	I0916 11:09:50.564569 2188520 command_runner.go:130] >         "options": null,
	I0916 11:09:50.564575 2188520 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0916 11:09:50.564583 2188520 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0916 11:09:50.564588 2188520 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0916 11:09:50.564592 2188520 command_runner.go:130] >         "cniConfDir": "",
	I0916 11:09:50.564596 2188520 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0916 11:09:50.564601 2188520 command_runner.go:130] >         "snapshotter": "",
	I0916 11:09:50.564607 2188520 command_runner.go:130] >         "sandboxMode": ""
	I0916 11:09:50.564611 2188520 command_runner.go:130] >       },
	I0916 11:09:50.564618 2188520 command_runner.go:130] >       "untrustedWorkloadRuntime": {
	I0916 11:09:50.564622 2188520 command_runner.go:130] >         "runtimeType": "",
	I0916 11:09:50.564633 2188520 command_runner.go:130] >         "runtimePath": "",
	I0916 11:09:50.564637 2188520 command_runner.go:130] >         "runtimeEngine": "",
	I0916 11:09:50.564642 2188520 command_runner.go:130] >         "PodAnnotations": null,
	I0916 11:09:50.564649 2188520 command_runner.go:130] >         "ContainerAnnotations": null,
	I0916 11:09:50.564653 2188520 command_runner.go:130] >         "runtimeRoot": "",
	I0916 11:09:50.564658 2188520 command_runner.go:130] >         "options": null,
	I0916 11:09:50.564666 2188520 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0916 11:09:50.564676 2188520 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0916 11:09:50.564680 2188520 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0916 11:09:50.564683 2188520 command_runner.go:130] >         "cniConfDir": "",
	I0916 11:09:50.564687 2188520 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0916 11:09:50.564692 2188520 command_runner.go:130] >         "snapshotter": "",
	I0916 11:09:50.564699 2188520 command_runner.go:130] >         "sandboxMode": ""
	I0916 11:09:50.564702 2188520 command_runner.go:130] >       },
	I0916 11:09:50.564708 2188520 command_runner.go:130] >       "runtimes": {
	I0916 11:09:50.564712 2188520 command_runner.go:130] >         "runc": {
	I0916 11:09:50.564723 2188520 command_runner.go:130] >           "runtimeType": "io.containerd.runc.v2",
	I0916 11:09:50.564727 2188520 command_runner.go:130] >           "runtimePath": "",
	I0916 11:09:50.564732 2188520 command_runner.go:130] >           "runtimeEngine": "",
	I0916 11:09:50.564738 2188520 command_runner.go:130] >           "PodAnnotations": null,
	I0916 11:09:50.564743 2188520 command_runner.go:130] >           "ContainerAnnotations": null,
	I0916 11:09:50.564747 2188520 command_runner.go:130] >           "runtimeRoot": "",
	I0916 11:09:50.564753 2188520 command_runner.go:130] >           "options": {
	I0916 11:09:50.564757 2188520 command_runner.go:130] >             "SystemdCgroup": false
	I0916 11:09:50.564761 2188520 command_runner.go:130] >           },
	I0916 11:09:50.564772 2188520 command_runner.go:130] >           "privileged_without_host_devices": false,
	I0916 11:09:50.564781 2188520 command_runner.go:130] >           "privileged_without_host_devices_all_devices_allowed": false,
	I0916 11:09:50.564786 2188520 command_runner.go:130] >           "baseRuntimeSpec": "",
	I0916 11:09:50.564790 2188520 command_runner.go:130] >           "cniConfDir": "",
	I0916 11:09:50.564800 2188520 command_runner.go:130] >           "cniMaxConfNum": 0,
	I0916 11:09:50.564804 2188520 command_runner.go:130] >           "snapshotter": "",
	I0916 11:09:50.564809 2188520 command_runner.go:130] >           "sandboxMode": "podsandbox"
	I0916 11:09:50.564814 2188520 command_runner.go:130] >         }
	I0916 11:09:50.564817 2188520 command_runner.go:130] >       },
	I0916 11:09:50.564821 2188520 command_runner.go:130] >       "noPivot": false,
	I0916 11:09:50.564827 2188520 command_runner.go:130] >       "disableSnapshotAnnotations": true,
	I0916 11:09:50.564833 2188520 command_runner.go:130] >       "discardUnpackedLayers": true,
	I0916 11:09:50.564838 2188520 command_runner.go:130] >       "ignoreBlockIONotEnabledErrors": false,
	I0916 11:09:50.564843 2188520 command_runner.go:130] >       "ignoreRdtNotEnabledErrors": false
	I0916 11:09:50.564848 2188520 command_runner.go:130] >     },
	I0916 11:09:50.564852 2188520 command_runner.go:130] >     "cni": {
	I0916 11:09:50.564856 2188520 command_runner.go:130] >       "binDir": "/opt/cni/bin",
	I0916 11:09:50.564869 2188520 command_runner.go:130] >       "confDir": "/etc/cni/net.d",
	I0916 11:09:50.564873 2188520 command_runner.go:130] >       "maxConfNum": 1,
	I0916 11:09:50.564877 2188520 command_runner.go:130] >       "setupSerially": false,
	I0916 11:09:50.564882 2188520 command_runner.go:130] >       "confTemplate": "",
	I0916 11:09:50.564891 2188520 command_runner.go:130] >       "ipPref": ""
	I0916 11:09:50.564895 2188520 command_runner.go:130] >     },
	I0916 11:09:50.564899 2188520 command_runner.go:130] >     "registry": {
	I0916 11:09:50.564905 2188520 command_runner.go:130] >       "configPath": "/etc/containerd/certs.d",
	I0916 11:09:50.564909 2188520 command_runner.go:130] >       "mirrors": null,
	I0916 11:09:50.564913 2188520 command_runner.go:130] >       "configs": null,
	I0916 11:09:50.564920 2188520 command_runner.go:130] >       "auths": null,
	I0916 11:09:50.564924 2188520 command_runner.go:130] >       "headers": null
	I0916 11:09:50.564931 2188520 command_runner.go:130] >     },
	I0916 11:09:50.564937 2188520 command_runner.go:130] >     "imageDecryption": {
	I0916 11:09:50.564943 2188520 command_runner.go:130] >       "keyModel": "node"
	I0916 11:09:50.564946 2188520 command_runner.go:130] >     },
	I0916 11:09:50.564950 2188520 command_runner.go:130] >     "disableTCPService": true,
	I0916 11:09:50.564954 2188520 command_runner.go:130] >     "streamServerAddress": "",
	I0916 11:09:50.564959 2188520 command_runner.go:130] >     "streamServerPort": "10010",
	I0916 11:09:50.564963 2188520 command_runner.go:130] >     "streamIdleTimeout": "4h0m0s",
	I0916 11:09:50.564971 2188520 command_runner.go:130] >     "enableSelinux": false,
	I0916 11:09:50.564976 2188520 command_runner.go:130] >     "selinuxCategoryRange": 1024,
	I0916 11:09:50.564986 2188520 command_runner.go:130] >     "sandboxImage": "registry.k8s.io/pause:3.10",
	I0916 11:09:50.564991 2188520 command_runner.go:130] >     "statsCollectPeriod": 10,
	I0916 11:09:50.564995 2188520 command_runner.go:130] >     "systemdCgroup": false,
	I0916 11:09:50.564999 2188520 command_runner.go:130] >     "enableTLSStreaming": false,
	I0916 11:09:50.565004 2188520 command_runner.go:130] >     "x509KeyPairStreaming": {
	I0916 11:09:50.565009 2188520 command_runner.go:130] >       "tlsCertFile": "",
	I0916 11:09:50.565014 2188520 command_runner.go:130] >       "tlsKeyFile": ""
	I0916 11:09:50.565025 2188520 command_runner.go:130] >     },
	I0916 11:09:50.565029 2188520 command_runner.go:130] >     "maxContainerLogSize": 16384,
	I0916 11:09:50.565033 2188520 command_runner.go:130] >     "disableCgroup": false,
	I0916 11:09:50.565037 2188520 command_runner.go:130] >     "disableApparmor": false,
	I0916 11:09:50.565043 2188520 command_runner.go:130] >     "restrictOOMScoreAdj": false,
	I0916 11:09:50.565047 2188520 command_runner.go:130] >     "maxConcurrentDownloads": 3,
	I0916 11:09:50.565054 2188520 command_runner.go:130] >     "disableProcMount": false,
	I0916 11:09:50.565058 2188520 command_runner.go:130] >     "unsetSeccompProfile": "",
	I0916 11:09:50.565062 2188520 command_runner.go:130] >     "tolerateMissingHugetlbController": true,
	I0916 11:09:50.565068 2188520 command_runner.go:130] >     "disableHugetlbController": true,
	I0916 11:09:50.565074 2188520 command_runner.go:130] >     "device_ownership_from_security_context": false,
	I0916 11:09:50.565078 2188520 command_runner.go:130] >     "ignoreImageDefinedVolumes": false,
	I0916 11:09:50.565085 2188520 command_runner.go:130] >     "netnsMountsUnderStateDir": false,
	I0916 11:09:50.565089 2188520 command_runner.go:130] >     "enableUnprivilegedPorts": true,
	I0916 11:09:50.565100 2188520 command_runner.go:130] >     "enableUnprivilegedICMP": false,
	I0916 11:09:50.565104 2188520 command_runner.go:130] >     "enableCDI": false,
	I0916 11:09:50.565108 2188520 command_runner.go:130] >     "cdiSpecDirs": [
	I0916 11:09:50.565111 2188520 command_runner.go:130] >       "/etc/cdi",
	I0916 11:09:50.565118 2188520 command_runner.go:130] >       "/var/run/cdi"
	I0916 11:09:50.565121 2188520 command_runner.go:130] >     ],
	I0916 11:09:50.565128 2188520 command_runner.go:130] >     "imagePullProgressTimeout": "5m0s",
	I0916 11:09:50.565132 2188520 command_runner.go:130] >     "drainExecSyncIOTimeout": "0s",
	I0916 11:09:50.565139 2188520 command_runner.go:130] >     "imagePullWithSyncFs": false,
	I0916 11:09:50.565144 2188520 command_runner.go:130] >     "ignoreDeprecationWarnings": null,
	I0916 11:09:50.565149 2188520 command_runner.go:130] >     "containerdRootDir": "/var/lib/containerd",
	I0916 11:09:50.565157 2188520 command_runner.go:130] >     "containerdEndpoint": "/run/containerd/containerd.sock",
	I0916 11:09:50.565163 2188520 command_runner.go:130] >     "rootDir": "/var/lib/containerd/io.containerd.grpc.v1.cri",
	I0916 11:09:50.565170 2188520 command_runner.go:130] >     "stateDir": "/run/containerd/io.containerd.grpc.v1.cri"
	I0916 11:09:50.565174 2188520 command_runner.go:130] >   },
	I0916 11:09:50.565178 2188520 command_runner.go:130] >   "golang": "go1.22.7",
	I0916 11:09:50.565186 2188520 command_runner.go:130] >   "lastCNILoadStatus": "OK",
	I0916 11:09:50.565193 2188520 command_runner.go:130] >   "lastCNILoadStatus.default": "OK"
	I0916 11:09:50.565197 2188520 command_runner.go:130] > }
	I0916 11:09:50.568364 2188520 cni.go:84] Creating CNI manager for ""
	I0916 11:09:50.568397 2188520 cni.go:136] multinode detected (3 nodes found), recommending kindnet
	I0916 11:09:50.568413 2188520 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:09:50.568434 2188520 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.58.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-890146 NodeName:multinode-890146 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.58.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.58.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPat
h:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:09:50.568565 2188520 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.58.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "multinode-890146"
	  kubeletExtraArgs:
	    node-ip: 192.168.58.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.58.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 11:09:50.568642 2188520 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:09:50.576573 2188520 command_runner.go:130] > kubeadm
	I0916 11:09:50.576594 2188520 command_runner.go:130] > kubectl
	I0916 11:09:50.576599 2188520 command_runner.go:130] > kubelet
	I0916 11:09:50.577757 2188520 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:09:50.577842 2188520 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 11:09:50.586724 2188520 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (320 bytes)
	I0916 11:09:50.605503 2188520 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:09:50.623901 2188520 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2170 bytes)
	I0916 11:09:50.642094 2188520 ssh_runner.go:195] Run: grep 192.168.58.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:09:50.645953 2188520 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.58.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:09:50.656515 2188520 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:09:50.738939 2188520 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:09:50.753200 2188520 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146 for IP: 192.168.58.2
	I0916 11:09:50.753328 2188520 certs.go:194] generating shared ca certs ...
	I0916 11:09:50.753362 2188520 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:50.753565 2188520 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 11:09:50.753639 2188520 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 11:09:50.753675 2188520 certs.go:256] generating profile certs ...
	I0916 11:09:50.753830 2188520 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key
	I0916 11:09:50.753945 2188520 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key.cd70a0e7
	I0916 11:09:50.754023 2188520 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key
	I0916 11:09:50.754062 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:09:50.754097 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:09:50.754136 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:09:50.754175 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:09:50.754205 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 11:09:50.754246 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 11:09:50.754283 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 11:09:50.754313 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 11:09:50.754400 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 11:09:50.754460 2188520 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 11:09:50.754482 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:09:50.754543 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 11:09:50.754599 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:09:50.754652 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 11:09:50.754753 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:09:50.754814 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 11:09:50.754855 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:50.754895 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 11:09:50.755530 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:09:50.787749 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 11:09:50.813604 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:09:50.840709 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 11:09:50.868822 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 11:09:50.902542 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 11:09:50.929063 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 11:09:50.953973 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 11:09:50.980379 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 11:09:51.011926 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:09:51.040163 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 11:09:51.068361 2188520 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 11:09:51.088195 2188520 ssh_runner.go:195] Run: openssl version
	I0916 11:09:51.094587 2188520 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:09:51.094811 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 11:09:51.105697 2188520 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 11:09:51.109823 2188520 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:09:51.109868 2188520 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:09:51.109952 2188520 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 11:09:51.117286 2188520 command_runner.go:130] > 3ec20f2e
	I0916 11:09:51.117763 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:09:51.127773 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:09:51.137763 2188520 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:51.141620 2188520 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:51.141662 2188520 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:51.141717 2188520 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:09:51.149074 2188520 command_runner.go:130] > b5213941
	I0916 11:09:51.149434 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:09:51.159905 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 11:09:51.169756 2188520 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 11:09:51.173410 2188520 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:09:51.173445 2188520 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:09:51.173498 2188520 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 11:09:51.180729 2188520 command_runner.go:130] > 51391683
	I0916 11:09:51.181200 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 11:09:51.190569 2188520 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:09:51.194207 2188520 command_runner.go:130] >   File: /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:09:51.194232 2188520 command_runner.go:130] >   Size: 1176      	Blocks: 8          IO Block: 4096   regular file
	I0916 11:09:51.194240 2188520 command_runner.go:130] > Device: 10301h/66305d	Inode: 1081533     Links: 1
	I0916 11:09:51.194246 2188520 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:09:51.194272 2188520 command_runner.go:130] > Access: 2024-09-16 11:07:17.780345562 +0000
	I0916 11:09:51.194282 2188520 command_runner.go:130] > Modify: 2024-09-16 11:07:17.780345562 +0000
	I0916 11:09:51.194288 2188520 command_runner.go:130] > Change: 2024-09-16 11:07:17.780345562 +0000
	I0916 11:09:51.194295 2188520 command_runner.go:130] >  Birth: 2024-09-16 11:07:17.780345562 +0000
	I0916 11:09:51.194383 2188520 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 11:09:51.201010 2188520 command_runner.go:130] > Certificate will not expire
	I0916 11:09:51.201403 2188520 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 11:09:51.208222 2188520 command_runner.go:130] > Certificate will not expire
	I0916 11:09:51.208613 2188520 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 11:09:51.215152 2188520 command_runner.go:130] > Certificate will not expire
	I0916 11:09:51.215605 2188520 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 11:09:51.222143 2188520 command_runner.go:130] > Certificate will not expire
	I0916 11:09:51.222617 2188520 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 11:09:51.229286 2188520 command_runner.go:130] > Certificate will not expire
	I0916 11:09:51.229741 2188520 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 11:09:51.236523 2188520 command_runner.go:130] > Certificate will not expire
	I0916 11:09:51.236913 2188520 kubeadm.go:392] StartCluster: {Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServe
rNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true} {Name:m03 IP:192.168.58.4 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:
false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClie
ntPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:09:51.237036 2188520 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 11:09:51.237100 2188520 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 11:09:51.274199 2188520 command_runner.go:130] > e8a9035126acce637354968a7c4092e855f0da6d2a3f39d1f94b5795e1d5079b
	I0916 11:09:51.274247 2188520 command_runner.go:130] > 0ca1a17f4990929d27725b61f7cdbaae3f44772041814b26daa29ba43c68ec37
	I0916 11:09:51.274254 2188520 command_runner.go:130] > eccab3e428039af99fd1b2378ae8fd52f2837469955a8b78fd8b72f906813586
	I0916 11:09:51.274262 2188520 command_runner.go:130] > 88800ca3adcdad421bba0ffcef548a966eeb5c210e5453a2ba8470a9e90ea01e
	I0916 11:09:51.274268 2188520 command_runner.go:130] > e8e11b0a6506f9d34c5800c4a5a6bcc8b9f3225a3487a3c437bc87d0b0aaf53d
	I0916 11:09:51.274274 2188520 command_runner.go:130] > 305b8895a34401a4626618470969b6ca3b591de13b0d36af0b2b2b23096ac46b
	I0916 11:09:51.274280 2188520 command_runner.go:130] > 424e6c1030bdc58751fd76a7652c31e5bd7dff844d888049b87815ddfaecc90b
	I0916 11:09:51.274294 2188520 command_runner.go:130] > 9d6ccf43cf5a5c28d56e616702330e693dc76d6773c7cc3e02e94f189195689b
	I0916 11:09:51.276994 2188520 cri.go:89] found id: "e8a9035126acce637354968a7c4092e855f0da6d2a3f39d1f94b5795e1d5079b"
	I0916 11:09:51.277013 2188520 cri.go:89] found id: "0ca1a17f4990929d27725b61f7cdbaae3f44772041814b26daa29ba43c68ec37"
	I0916 11:09:51.277019 2188520 cri.go:89] found id: "eccab3e428039af99fd1b2378ae8fd52f2837469955a8b78fd8b72f906813586"
	I0916 11:09:51.277023 2188520 cri.go:89] found id: "88800ca3adcdad421bba0ffcef548a966eeb5c210e5453a2ba8470a9e90ea01e"
	I0916 11:09:51.277026 2188520 cri.go:89] found id: "e8e11b0a6506f9d34c5800c4a5a6bcc8b9f3225a3487a3c437bc87d0b0aaf53d"
	I0916 11:09:51.277038 2188520 cri.go:89] found id: "305b8895a34401a4626618470969b6ca3b591de13b0d36af0b2b2b23096ac46b"
	I0916 11:09:51.277042 2188520 cri.go:89] found id: "424e6c1030bdc58751fd76a7652c31e5bd7dff844d888049b87815ddfaecc90b"
	I0916 11:09:51.277045 2188520 cri.go:89] found id: "9d6ccf43cf5a5c28d56e616702330e693dc76d6773c7cc3e02e94f189195689b"
	I0916 11:09:51.277049 2188520 cri.go:89] found id: ""
	I0916 11:09:51.277113 2188520 ssh_runner.go:195] Run: sudo runc --root /run/containerd/runc/k8s.io list -f json
	I0916 11:09:51.287999 2188520 command_runner.go:130] > null
	I0916 11:09:51.289856 2188520 cri.go:116] JSON = null
	W0916 11:09:51.289902 2188520 kubeadm.go:399] unpause failed: list paused: list returned 0 containers, but ps returned 8
	I0916 11:09:51.289974 2188520 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 11:09:51.298341 2188520 command_runner.go:130] > /var/lib/kubelet/config.yaml
	I0916 11:09:51.298373 2188520 command_runner.go:130] > /var/lib/kubelet/kubeadm-flags.env
	I0916 11:09:51.298380 2188520 command_runner.go:130] > /var/lib/minikube/etcd:
	I0916 11:09:51.298385 2188520 command_runner.go:130] > member
	I0916 11:09:51.299822 2188520 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 11:09:51.299862 2188520 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 11:09:51.299922 2188520 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 11:09:51.308802 2188520 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 11:09:51.309267 2188520 kubeconfig.go:47] verify endpoint returned: get endpoint: "multinode-890146" does not appear in /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:09:51.309376 2188520 kubeconfig.go:62] /home/jenkins/minikube-integration/19651-2057935/kubeconfig needs updating (will repair): [kubeconfig missing "multinode-890146" cluster setting kubeconfig missing "multinode-890146" context setting]
	I0916 11:09:51.309658 2188520 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:51.310033 2188520 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:09:51.310289 2188520 kapi.go:59] client config for multinode-890146: &rest.Config{Host:"https://192.168.58.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:09:51.310898 2188520 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 11:09:51.311072 2188520 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 11:09:51.320903 2188520 kubeadm.go:630] The running cluster does not require reconfiguration: 192.168.58.2
	I0916 11:09:51.320935 2188520 kubeadm.go:597] duration metric: took 21.066702ms to restartPrimaryControlPlane
	I0916 11:09:51.320945 2188520 kubeadm.go:394] duration metric: took 84.048266ms to StartCluster
	I0916 11:09:51.320961 2188520 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:51.321021 2188520 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:09:51.321651 2188520 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:09:51.321865 2188520 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 11:09:51.322251 2188520 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:09:51.322303 2188520 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:09:51.324570 2188520 out.go:177] * Verifying Kubernetes components...
	I0916 11:09:51.326011 2188520 out.go:177] * Enabled addons: 
	I0916 11:09:51.328154 2188520 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:09:51.334189 2188520 addons.go:510] duration metric: took 11.880101ms for enable addons: enabled=[]
	I0916 11:09:51.465137 2188520 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:09:51.478010 2188520 node_ready.go:35] waiting up to 6m0s for node "multinode-890146" to be "Ready" ...
	I0916 11:09:51.478125 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:51.478137 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:51.478146 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:51.478151 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:51.478359 2188520 round_trippers.go:574] Response Status:  in 0 milliseconds
	I0916 11:09:51.478378 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:51.979185 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:51.979207 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:51.979217 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:51.979223 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:55.601283 2188520 round_trippers.go:574] Response Status: 200 OK in 3622 milliseconds
	I0916 11:09:55.601308 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:55.601316 2188520 round_trippers.go:580]     Audit-Id: f68cc669-ea42-4c19-ac8b-5d9c8fb48ac4
	I0916 11:09:55.601320 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:55.601323 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:55.601325 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:09:55.601328 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:09:55.601330 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:55 GMT
	I0916 11:09:55.602486 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"574","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5285 chars]
	I0916 11:09:55.603288 2188520 node_ready.go:49] node "multinode-890146" has status "Ready":"True"
	I0916 11:09:55.603306 2188520 node_ready.go:38] duration metric: took 4.125263776s for node "multinode-890146" to be "Ready" ...
	I0916 11:09:55.603316 2188520 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:09:55.603361 2188520 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 11:09:55.603372 2188520 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 11:09:55.603431 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:09:55.603436 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:55.603444 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:55.603494 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:55.648372 2188520 round_trippers.go:574] Response Status: 200 OK in 44 milliseconds
	I0916 11:09:55.648394 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:55.648403 2188520 round_trippers.go:580]     Audit-Id: c546bc49-a259-43a4-9b3f-ba136942a300
	I0916 11:09:55.648408 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:55.648412 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:55.648414 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:09:55.648417 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:09:55.648420 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:55 GMT
	I0916 11:09:55.650771 2188520 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"681"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"456","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 88984 chars]
	I0916 11:09:55.656568 2188520 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:09:55.656722 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:09:55.656749 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:55.656770 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:55.656788 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:55.659127 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:55.659190 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:55.659213 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:09:55.659228 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:09:55.659246 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:55 GMT
	I0916 11:09:55.659280 2188520 round_trippers.go:580]     Audit-Id: 93ff6a75-f491-4296-8b4c-c386aa16536c
	I0916 11:09:55.659296 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:55.659310 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:55.659487 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"456","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6480 chars]
	I0916 11:09:55.660078 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:55.660125 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:55.660148 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:55.660168 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:55.662203 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:09:55.662246 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:55.662276 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:55 GMT
	I0916 11:09:55.662292 2188520 round_trippers.go:580]     Audit-Id: 7279c683-3206-46af-8336-b67e141957dd
	I0916 11:09:55.662322 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:55.662342 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:55.662357 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:09:55.662372 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:09:55.662566 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"574","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5285 chars]
	I0916 11:09:55.663081 2188520 pod_ready.go:93] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"True"
	I0916 11:09:55.663122 2188520 pod_ready.go:82] duration metric: took 6.483362ms for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:09:55.663145 2188520 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:09:55.663251 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-890146
	I0916 11:09:55.663274 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:55.663295 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:55.663371 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:55.665801 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:55.665856 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:55.665876 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:09:55.665931 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:55 GMT
	I0916 11:09:55.665953 2188520 round_trippers.go:580]     Audit-Id: 9270cb80-74f1-4f0c-b197-296e7e29143f
	I0916 11:09:55.665968 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:55.665984 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:55.666015 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:09:55.666182 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-890146","namespace":"kube-system","uid":"59c960ab-f6dd-4ed3-856c-ba2a02295b12","resourceVersion":"327","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.58.2:2379","kubernetes.io/config.hash":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.mirror":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.seen":"2024-09-16T11:07:32.783608135Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-cl
ient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6440 chars]
	I0916 11:09:55.666780 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:55.666815 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:55.666837 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:55.666871 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:55.669335 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:55.669381 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:55.669413 2188520 round_trippers.go:580]     Audit-Id: ab6d5efa-acdb-4c48-86b9-3837989a9878
	I0916 11:09:55.669431 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:55.669459 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:55.669478 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:09:55.669494 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:09:55.669507 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:55 GMT
	I0916 11:09:55.669672 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"574","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5285 chars]
	I0916 11:09:55.670112 2188520 pod_ready.go:93] pod "etcd-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:09:55.670149 2188520 pod_ready.go:82] duration metric: took 6.967019ms for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:09:55.670196 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:09:55.670292 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-890146
	I0916 11:09:55.670316 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:55.670347 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:55.670365 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:55.676471 2188520 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 11:09:55.676540 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:55.676569 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:55.676589 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:55.676617 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:09:55.676638 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:09:55.676654 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:55 GMT
	I0916 11:09:55.676668 2188520 round_trippers.go:580]     Audit-Id: 2e75dd3d-3fb0-474e-ade6-ea2feeaffbd9
	I0916 11:09:55.677253 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-890146","namespace":"kube-system","uid":"9846229d-7227-453f-8c30-697aaba61648","resourceVersion":"432","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.58.2:8443","kubernetes.io/config.hash":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.mirror":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.seen":"2024-09-16T11:07:32.783610957Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kube
rnetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 8518 chars]
	I0916 11:09:55.677901 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:55.677944 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:55.677967 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:55.677984 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:55.680249 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:55.680310 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:55.680331 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:55 GMT
	I0916 11:09:55.680363 2188520 round_trippers.go:580]     Audit-Id: ea0dff66-4ccc-4e4d-9c07-94561bb63cdb
	I0916 11:09:55.680381 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:55.680397 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:55.680427 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:09:55.680447 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:09:55.682471 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"574","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5285 chars]
	I0916 11:09:55.682986 2188520 pod_ready.go:93] pod "kube-apiserver-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:09:55.683039 2188520 pod_ready.go:82] duration metric: took 12.814348ms for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:09:55.683066 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:09:55.683163 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-890146
	I0916 11:09:55.683198 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:55.683222 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:55.683240 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:55.687323 2188520 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:09:55.687393 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:55.687415 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:55.687433 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:55.687462 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:09:55.687483 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:09:55.687501 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:55 GMT
	I0916 11:09:55.687514 2188520 round_trippers.go:580]     Audit-Id: bf659452-357f-4525-9ee5-21e523c1fffe
	I0916 11:09:55.688385 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-890146","namespace":"kube-system","uid":"421b15a5-a8e2-4631-bf18-1592c8747b09","resourceVersion":"436","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.mirror":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.seen":"2024-09-16T11:07:32.783594137Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8093 chars]
	I0916 11:09:55.689044 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:55.689089 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:55.689112 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:55.689132 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:55.691200 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:55.691258 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:55.691281 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:09:55.691299 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:09:55.691313 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:55 GMT
	I0916 11:09:55.691347 2188520 round_trippers.go:580]     Audit-Id: 9fb2f547-60ab-4303-b6d6-68506b93b250
	I0916 11:09:55.691362 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:55.691375 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:55.692343 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"574","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5285 chars]
	I0916 11:09:55.692831 2188520 pod_ready.go:93] pod "kube-controller-manager-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:09:55.692878 2188520 pod_ready.go:82] duration metric: took 9.79139ms for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:09:55.692905 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:09:55.692999 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:09:55.693033 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:55.693055 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:55.693073 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:55.698507 2188520 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:09:55.698579 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:55.698602 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:55.698620 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:09:55.698649 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:09:55.698669 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:55 GMT
	I0916 11:09:55.698711 2188520 round_trippers.go:580]     Audit-Id: 7d1af39c-5d46-479a-86b0-d64567e69fee
	I0916 11:09:55.698749 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:55.699258 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"519","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6183 chars]
	I0916 11:09:55.804099 2188520 request.go:632] Waited for 104.230443ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:09:55.804218 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:09:55.804258 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:55.804284 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:55.804301 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:55.806938 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:55.807021 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:55.807044 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:55.807063 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:55.807092 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:55.807113 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:55.807127 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:55 GMT
	I0916 11:09:55.807143 2188520 round_trippers.go:580]     Audit-Id: 85a31c93-d309-4916-bbb0-ad514b71fdfd
	I0916 11:09:55.807775 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"578","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fiel
dsV1":{"f:metadata":{"f:annotations":{"f:node.alpha.kubernetes.io/ttl": [truncated 5073 chars]
	I0916 11:09:55.808241 2188520 pod_ready.go:93] pod "kube-proxy-59f9h" in "kube-system" namespace has status "Ready":"True"
	I0916 11:09:55.808276 2188520 pod_ready.go:82] duration metric: took 115.351427ms for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:09:55.808314 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:09:56.003565 2188520 request.go:632] Waited for 195.115425ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:09:56.003693 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:09:56.003729 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:56.003758 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:56.003776 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:56.008149 2188520 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:09:56.008324 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:56.008356 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:56.008375 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:56 GMT
	I0916 11:09:56.008408 2188520 round_trippers.go:580]     Audit-Id: aaee3400-7eae-4bb7-ac8c-473046adf84d
	I0916 11:09:56.008430 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:56.008449 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:56.008467 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:56.008635 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-fm5qr","generateName":"kube-proxy-","namespace":"kube-system","uid":"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73","resourceVersion":"730","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6579 chars]
	I0916 11:09:56.203530 2188520 request.go:632] Waited for 194.244586ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:56.203758 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:56.203789 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:56.203824 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:56.203855 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:56.206569 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:56.206649 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:56.206724 2188520 round_trippers.go:580]     Audit-Id: 1cb5720f-9213-419f-9c76-3cb4afddb934
	I0916 11:09:56.206751 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:56.206774 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:56.206813 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:56.206846 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:56.206868 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:56 GMT
	I0916 11:09:56.207073 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:09:56.403619 2188520 request.go:632] Waited for 94.083076ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:09:56.403741 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:09:56.403781 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:56.403816 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:56.403841 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:56.406745 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:56.406822 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:56.406870 2188520 round_trippers.go:580]     Audit-Id: 771b39c7-d741-412b-b9cb-c93f1b0eebfa
	I0916 11:09:56.406903 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:56.406924 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:56.406940 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:56.406955 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:56.406987 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:56 GMT
	I0916 11:09:56.407123 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-fm5qr","generateName":"kube-proxy-","namespace":"kube-system","uid":"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73","resourceVersion":"730","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6579 chars]
	I0916 11:09:56.604056 2188520 request.go:632] Waited for 196.324665ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:56.604211 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:56.604237 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:56.604267 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:56.604293 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:56.610445 2188520 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 11:09:56.610527 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:56.610550 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:56.610583 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:56.610618 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:56 GMT
	I0916 11:09:56.610639 2188520 round_trippers.go:580]     Audit-Id: f4d4348c-4e85-45aa-96d3-beb2800067bb
	I0916 11:09:56.610655 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:56.610700 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:56.610945 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:09:56.809371 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:09:56.809447 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:56.809474 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:56.809494 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:56.812333 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:56.812461 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:56.812503 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:56.812526 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:56.812552 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:56 GMT
	I0916 11:09:56.812588 2188520 round_trippers.go:580]     Audit-Id: 09a70582-8081-4617-ba52-d005ff5d0af5
	I0916 11:09:56.812610 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:56.812632 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:56.812803 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-fm5qr","generateName":"kube-proxy-","namespace":"kube-system","uid":"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73","resourceVersion":"730","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6579 chars]
	I0916 11:09:57.007321 2188520 request.go:632] Waited for 193.813031ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:57.007416 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:57.007430 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:57.007439 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:57.007444 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:57.010448 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:57.010475 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:57.010489 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:57.010495 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:57.010500 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:57 GMT
	I0916 11:09:57.010503 2188520 round_trippers.go:580]     Audit-Id: 97a290fc-5ccf-4c4a-95ba-912a3285b029
	I0916 11:09:57.010506 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:57.010510 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:57.011225 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:09:57.309084 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:09:57.309110 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:57.309123 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:57.309128 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:57.311674 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:57.311750 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:57.311772 2188520 round_trippers.go:580]     Audit-Id: 519c7ee8-41fb-4583-b3df-3489b9156a9a
	I0916 11:09:57.311787 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:57.311817 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:57.311865 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:57.311881 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:57.311892 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:57 GMT
	I0916 11:09:57.312039 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-fm5qr","generateName":"kube-proxy-","namespace":"kube-system","uid":"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73","resourceVersion":"759","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6388 chars]
	I0916 11:09:57.403854 2188520 request.go:632] Waited for 91.277905ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:57.403969 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:09:57.403983 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:57.404036 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:57.404048 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:57.406585 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:57.406660 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:57.406790 2188520 round_trippers.go:580]     Audit-Id: 03d51704-9e2c-4dfb-a745-03916578b354
	I0916 11:09:57.406820 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:57.406839 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:57.406865 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:57.406881 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:57.406898 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:57 GMT
	I0916 11:09:57.407819 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:09:57.408307 2188520 pod_ready.go:93] pod "kube-proxy-fm5qr" in "kube-system" namespace has status "Ready":"True"
	I0916 11:09:57.408353 2188520 pod_ready.go:82] duration metric: took 1.600014395s for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:09:57.408379 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-vl27g" in "kube-system" namespace to be "Ready" ...
	I0916 11:09:57.603720 2188520 request.go:632] Waited for 195.256888ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:09:57.603843 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:09:57.603875 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:57.603907 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:57.603927 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:57.608712 2188520 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:09:57.608818 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:57.608841 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:57 GMT
	I0916 11:09:57.608861 2188520 round_trippers.go:580]     Audit-Id: 6dbaed1a-3c92-4836-8532-e417bd9ac414
	I0916 11:09:57.608876 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:57.608907 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:57.608923 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:57.608939 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:57.609086 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:09:57.804044 2188520 request.go:632] Waited for 194.310119ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:09:57.804130 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:09:57.804142 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:57.804172 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:57.804182 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:57.806519 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:57.806540 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:57.806547 2188520 round_trippers.go:580]     Audit-Id: 96eb583a-65bf-4472-8f70-a136b4b9d1e2
	I0916 11:09:57.806551 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:57.806555 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:57.806589 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:57.806597 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:57.806600 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:57 GMT
	I0916 11:09:57.806925 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:09:58.003845 2188520 request.go:632] Waited for 95.210364ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:09:58.003928 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:09:58.003935 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:58.003944 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:58.003948 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:58.007851 2188520 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:09:58.007944 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:58.007967 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:58.007983 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:58.008014 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:58.008037 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:58.008054 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:58 GMT
	I0916 11:09:58.008071 2188520 round_trippers.go:580]     Audit-Id: e751c44a-e34a-423a-a8a3-a8aa0dc25edb
	I0916 11:09:58.008254 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:09:58.204312 2188520 request.go:632] Waited for 195.448353ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:09:58.204434 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:09:58.204449 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:58.204472 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:58.204483 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:58.206792 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:58.206817 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:58.206825 2188520 round_trippers.go:580]     Audit-Id: f50a0196-a459-4d7a-9247-fa027520d509
	I0916 11:09:58.206829 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:58.206832 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:58.206834 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:58.206838 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:58.206841 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:58 GMT
	I0916 11:09:58.207140 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:09:58.408968 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:09:58.408991 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:58.409001 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:58.409022 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:58.411494 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:58.411568 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:58.411604 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:58.411626 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:58.411642 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:58.411660 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:58 GMT
	I0916 11:09:58.411693 2188520 round_trippers.go:580]     Audit-Id: 45a32aac-3e55-4359-aa5d-963bb3e9dcc7
	I0916 11:09:58.411719 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:58.412329 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:09:58.604273 2188520 request.go:632] Waited for 191.367202ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:09:58.604372 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:09:58.604383 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:58.604400 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:58.604408 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:58.607145 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:58.607177 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:58.607187 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:58.607192 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:58 GMT
	I0916 11:09:58.607196 2188520 round_trippers.go:580]     Audit-Id: b132604f-a8d9-4933-b05b-1d192a0fa0e8
	I0916 11:09:58.607199 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:58.607201 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:58.607204 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:58.607705 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:09:58.908641 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:09:58.908664 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:58.908674 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:58.908678 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:58.911225 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:58.911298 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:58.911334 2188520 round_trippers.go:580]     Audit-Id: b108f11d-f4ba-4ed9-b8dc-d7295fd8b44b
	I0916 11:09:58.911356 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:58.911373 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:58.911405 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:58.911425 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:58.911434 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:58 GMT
	I0916 11:09:58.911634 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:09:59.003521 2188520 request.go:632] Waited for 91.313065ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:09:59.003666 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:09:59.003701 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:59.003727 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:59.003746 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:59.008248 2188520 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:09:59.008281 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:59.008290 2188520 round_trippers.go:580]     Audit-Id: 5a4c6de0-c15d-4c7e-8732-8a9b92f0af45
	I0916 11:09:59.008294 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:59.008331 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:59.008334 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:59.008337 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:59.008340 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:59 GMT
	I0916 11:09:59.008490 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:09:59.408905 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:09:59.408929 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:59.408939 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:59.408945 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:59.411255 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:59.411346 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:59.411370 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:59.411406 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:59.411422 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:59.411432 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:59 GMT
	I0916 11:09:59.411435 2188520 round_trippers.go:580]     Audit-Id: 6faff576-6b3d-4102-abc8-6d8e9289f097
	I0916 11:09:59.411440 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:59.411587 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:09:59.412183 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:09:59.412202 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:59.412210 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:59.412214 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:59.414107 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:09:59.414164 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:59.414186 2188520 round_trippers.go:580]     Audit-Id: 95017177-ef82-463e-b985-b07410acb2ce
	I0916 11:09:59.414201 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:59.414220 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:59.414266 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:59.414275 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:59.414280 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:59 GMT
	I0916 11:09:59.414411 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:09:59.414821 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:09:59.908675 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:09:59.908702 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:59.908711 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:59.908716 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:59.911246 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:59.911267 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:59.911276 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:59.911282 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:59.911285 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:59.911289 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:59 GMT
	I0916 11:09:59.911298 2188520 round_trippers.go:580]     Audit-Id: 92917a38-09d7-438e-b99d-3a5812fdd45d
	I0916 11:09:59.911301 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:59.911606 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:09:59.912137 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:09:59.912148 2188520 round_trippers.go:469] Request Headers:
	I0916 11:09:59.912157 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:09:59.912161 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:09:59.914668 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:09:59.914777 2188520 round_trippers.go:577] Response Headers:
	I0916 11:09:59.914794 2188520 round_trippers.go:580]     Audit-Id: f8490813-d7cd-4a13-8d8e-2be9de1a716e
	I0916 11:09:59.914800 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:09:59.914804 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:09:59.914808 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:09:59.914811 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:09:59.914814 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:09:59 GMT
	I0916 11:09:59.914941 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:00.409552 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:00.409592 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:00.409603 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:00.409608 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:00.413464 2188520 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:00.413489 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:00.413498 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:00.413502 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:00.413507 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:00.413510 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:00.413513 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:00 GMT
	I0916 11:10:00.413516 2188520 round_trippers.go:580]     Audit-Id: 663b9777-3abd-41c1-9fa7-93670f3a458d
	I0916 11:10:00.413636 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:00.414235 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:00.414248 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:00.414257 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:00.414261 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:00.423837 2188520 round_trippers.go:574] Response Status: 200 OK in 9 milliseconds
	I0916 11:10:00.423863 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:00.423874 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:00.423879 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:00.423885 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:00.423889 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:00 GMT
	I0916 11:10:00.423892 2188520 round_trippers.go:580]     Audit-Id: 4a19e905-722a-483e-8647-599e579c7abb
	I0916 11:10:00.423895 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:00.424058 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:00.909195 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:00.909220 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:00.909229 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:00.909234 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:00.911843 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:00.911922 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:00.911942 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:00.911958 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:00.911996 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:00.912019 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:00 GMT
	I0916 11:10:00.912034 2188520 round_trippers.go:580]     Audit-Id: a708b225-6ba7-4afd-b1c2-7fa5ae99352a
	I0916 11:10:00.912050 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:00.912233 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:00.912780 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:00.912799 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:00.912808 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:00.912813 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:00.915085 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:00.915108 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:00.915116 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:00 GMT
	I0916 11:10:00.915120 2188520 round_trippers.go:580]     Audit-Id: 5105d9cc-acf3-4991-8d32-27be9002af24
	I0916 11:10:00.915123 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:00.915126 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:00.915129 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:00.915132 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:00.915397 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:01.408670 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:01.408700 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:01.408717 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:01.408723 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:01.411016 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:01.411053 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:01.411062 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:01.411066 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:01 GMT
	I0916 11:10:01.411070 2188520 round_trippers.go:580]     Audit-Id: 8f259645-f852-4fa7-88ff-8a4c7b5c8f32
	I0916 11:10:01.411075 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:01.411078 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:01.411081 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:01.411427 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:01.411982 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:01.412000 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:01.412010 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:01.412016 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:01.414022 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:01.414046 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:01.414054 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:01 GMT
	I0916 11:10:01.414058 2188520 round_trippers.go:580]     Audit-Id: 00730038-8068-4d2b-8eda-305695c966cf
	I0916 11:10:01.414062 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:01.414064 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:01.414067 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:01.414070 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:01.414189 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:01.909334 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:01.909363 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:01.909373 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:01.909379 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:01.911816 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:01.911846 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:01.911860 2188520 round_trippers.go:580]     Audit-Id: a22acea0-cb2f-4702-8938-a00dad58ac0d
	I0916 11:10:01.911865 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:01.911868 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:01.911871 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:01.911875 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:01.911883 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:01 GMT
	I0916 11:10:01.912034 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:01.912581 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:01.912598 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:01.912606 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:01.912611 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:01.914753 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:01.914773 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:01.914782 2188520 round_trippers.go:580]     Audit-Id: b15c4510-aa85-4165-bdce-dbaddab6c89e
	I0916 11:10:01.914786 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:01.914789 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:01.914791 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:01.914794 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:01.914797 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:01 GMT
	I0916 11:10:01.914942 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:01.915303 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:02.408652 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:02.408677 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:02.408686 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:02.408692 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:02.411095 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:02.411117 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:02.411126 2188520 round_trippers.go:580]     Audit-Id: 9dbd77a3-7f71-480e-8244-201641de4f97
	I0916 11:10:02.411131 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:02.411134 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:02.411137 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:02.411140 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:02.411142 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:02 GMT
	I0916 11:10:02.411341 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:02.411877 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:02.411907 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:02.411916 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:02.411926 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:02.414029 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:02.414059 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:02.414065 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:02.414071 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:02 GMT
	I0916 11:10:02.414075 2188520 round_trippers.go:580]     Audit-Id: 32f44c38-2a16-4f0d-93a4-d19ededa784d
	I0916 11:10:02.414078 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:02.414082 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:02.414084 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:02.414469 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:02.909185 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:02.909212 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:02.909222 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:02.909226 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:02.911559 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:02.911589 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:02.911598 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:02.911603 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:02.911606 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:02.911609 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:02.911612 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:02 GMT
	I0916 11:10:02.911614 2188520 round_trippers.go:580]     Audit-Id: b95e0850-ad7f-4735-b98d-bd2dfad73b9a
	I0916 11:10:02.911979 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:02.912545 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:02.912565 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:02.912574 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:02.912579 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:02.914711 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:02.914821 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:02.914842 2188520 round_trippers.go:580]     Audit-Id: eeeda05a-4560-46d8-8a26-0544df087b4a
	I0916 11:10:02.914861 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:02.914901 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:02.914912 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:02.914916 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:02.914919 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:02 GMT
	I0916 11:10:02.915073 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:03.409571 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:03.409601 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:03.409609 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:03.409615 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:03.412074 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:03.412097 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:03.412106 2188520 round_trippers.go:580]     Audit-Id: d58fefc4-0c12-40f4-9bc8-c5d3946d4361
	I0916 11:10:03.412123 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:03.412128 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:03.412131 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:03.412134 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:03.412137 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:03 GMT
	I0916 11:10:03.412266 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:03.412795 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:03.412805 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:03.412814 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:03.412818 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:03.414802 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:03.414821 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:03.414830 2188520 round_trippers.go:580]     Audit-Id: 8723953e-d71b-4a15-bd5a-b484fdb9aa03
	I0916 11:10:03.414835 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:03.414859 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:03.414863 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:03.414866 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:03.414869 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:03 GMT
	I0916 11:10:03.415128 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:03.909182 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:03.909208 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:03.909216 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:03.909221 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:03.911957 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:03.911985 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:03.911993 2188520 round_trippers.go:580]     Audit-Id: 2fb31f97-de03-4d51-a351-2ce4123cb127
	I0916 11:10:03.911998 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:03.912004 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:03.912007 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:03.912011 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:03.912014 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:03 GMT
	I0916 11:10:03.912218 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:03.912759 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:03.912769 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:03.912777 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:03.912781 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:03.914880 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:03.914905 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:03.914915 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:03.914919 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:03.914924 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:03.914927 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:03 GMT
	I0916 11:10:03.914931 2188520 round_trippers.go:580]     Audit-Id: 7ebb26c7-1735-4cd0-8f9d-79acf63c2bc8
	I0916 11:10:03.914934 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:03.915386 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:03.915776 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:04.408643 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:04.408666 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:04.408677 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:04.408681 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:04.411080 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:04.411109 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:04.411119 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:04.411124 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:04.411157 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:04 GMT
	I0916 11:10:04.411161 2188520 round_trippers.go:580]     Audit-Id: b162f638-fc83-4b66-92d4-8550e8dcc2c6
	I0916 11:10:04.411164 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:04.411168 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:04.411548 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:04.412101 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:04.412118 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:04.412127 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:04.412133 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:04.414382 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:04.414407 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:04.414415 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:04 GMT
	I0916 11:10:04.414420 2188520 round_trippers.go:580]     Audit-Id: b202b3fc-c514-439a-b3bf-e336a18e6e2d
	I0916 11:10:04.414423 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:04.414426 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:04.414429 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:04.414436 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:04.414563 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:04.908564 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:04.908598 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:04.908609 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:04.908615 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:04.911020 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:04.911043 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:04.911051 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:04.911055 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:04.911058 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:04 GMT
	I0916 11:10:04.911061 2188520 round_trippers.go:580]     Audit-Id: c8b01fcf-b4bf-4b38-8bd9-b63b2e0beb49
	I0916 11:10:04.911064 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:04.911067 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:04.911262 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:04.911812 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:04.911830 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:04.911839 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:04.911844 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:04.914082 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:04.914101 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:04.914109 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:04.914114 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:04.914117 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:04.914121 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:04.914125 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:04 GMT
	I0916 11:10:04.914128 2188520 round_trippers.go:580]     Audit-Id: d515508e-03a8-44b4-a3b0-2d26e5b41a1f
	I0916 11:10:04.914259 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:05.409357 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:05.409401 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:05.409411 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:05.409417 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:05.411782 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:05.411804 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:05.411812 2188520 round_trippers.go:580]     Audit-Id: 033ba6bb-7448-48b9-801a-7e6dc0551636
	I0916 11:10:05.411817 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:05.411820 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:05.411822 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:05.411829 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:05.411832 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:05 GMT
	I0916 11:10:05.411973 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:05.412500 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:05.412509 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:05.412517 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:05.412521 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:05.414649 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:05.414694 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:05.414704 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:05.414708 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:05 GMT
	I0916 11:10:05.414711 2188520 round_trippers.go:580]     Audit-Id: 0cf2125a-ff34-412b-8693-d167832d427f
	I0916 11:10:05.414715 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:05.414718 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:05.414743 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:05.414965 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:05.909154 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:05.909183 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:05.909193 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:05.909198 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:05.911628 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:05.911654 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:05.911662 2188520 round_trippers.go:580]     Audit-Id: afaea6aa-c341-40b4-a981-0425f0f3d041
	I0916 11:10:05.911667 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:05.911674 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:05.911677 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:05.911680 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:05.911683 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:05 GMT
	I0916 11:10:05.911798 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:05.912341 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:05.912358 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:05.912367 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:05.912370 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:05.914371 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:05.914396 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:05.914404 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:05 GMT
	I0916 11:10:05.914408 2188520 round_trippers.go:580]     Audit-Id: b33d59fe-ad5b-4ce8-9355-817415e44403
	I0916 11:10:05.914412 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:05.914420 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:05.914423 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:05.914426 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:05.914540 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:06.409392 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:06.409419 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:06.409428 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:06.409432 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:06.411760 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:06.411785 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:06.411794 2188520 round_trippers.go:580]     Audit-Id: 8c37ce84-62ad-45fa-aea0-d00c2948167b
	I0916 11:10:06.411800 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:06.411803 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:06.411805 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:06.411808 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:06.411812 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:06 GMT
	I0916 11:10:06.412088 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:06.412646 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:06.412664 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:06.412672 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:06.412677 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:06.414759 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:06.414782 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:06.414791 2188520 round_trippers.go:580]     Audit-Id: 0bf35cc0-e7c4-48c7-8abe-fe8fa384c181
	I0916 11:10:06.414796 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:06.414800 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:06.414802 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:06.414805 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:06.414808 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:06 GMT
	I0916 11:10:06.414943 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:06.415310 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:06.908979 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:06.909003 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:06.909014 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:06.909019 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:06.911415 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:06.911487 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:06.911520 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:06.911542 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:06 GMT
	I0916 11:10:06.911557 2188520 round_trippers.go:580]     Audit-Id: dfffd326-c58d-4939-90b2-0332538c126f
	I0916 11:10:06.911574 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:06.911602 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:06.911623 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:06.911782 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:06.912330 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:06.912350 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:06.912359 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:06.912366 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:06.914550 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:06.914568 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:06.914577 2188520 round_trippers.go:580]     Audit-Id: 21f1ae1f-94ae-42da-81e8-775111babb01
	I0916 11:10:06.914588 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:06.914592 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:06.914595 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:06.914598 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:06.914601 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:06 GMT
	I0916 11:10:06.914853 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:07.409394 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:07.409421 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:07.409431 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:07.409436 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:07.411937 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:07.411973 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:07.411982 2188520 round_trippers.go:580]     Audit-Id: 5f7ae782-241a-45dd-80ff-152da17a0929
	I0916 11:10:07.411986 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:07.411989 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:07.411999 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:07.412003 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:07.412007 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:07 GMT
	I0916 11:10:07.412121 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:07.412653 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:07.412672 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:07.412679 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:07.412685 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:07.414672 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:07.414714 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:07.414729 2188520 round_trippers.go:580]     Audit-Id: 25681a39-acfc-40c2-b66a-5b1c60e21076
	I0916 11:10:07.414734 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:07.414738 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:07.414741 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:07.414745 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:07.414748 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:07 GMT
	I0916 11:10:07.414968 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:07.909112 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:07.909139 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:07.909149 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:07.909154 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:07.911546 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:07.911621 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:07.911658 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:07.911681 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:07.911698 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:07.911716 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:07.911750 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:07 GMT
	I0916 11:10:07.911766 2188520 round_trippers.go:580]     Audit-Id: fa079cc1-d504-4681-b7dc-9fda037f80a9
	I0916 11:10:07.911923 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:07.912507 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:07.912526 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:07.912536 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:07.912540 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:07.914739 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:07.914765 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:07.914774 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:07.914778 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:07.914782 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:07.914785 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:07.914788 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:07 GMT
	I0916 11:10:07.914792 2188520 round_trippers.go:580]     Audit-Id: 1b378d77-b07e-4f7a-9a75-b312b07919e8
	I0916 11:10:07.918670 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:08.408695 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:08.408722 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:08.408738 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:08.408747 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:08.411464 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:08.411486 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:08.411494 2188520 round_trippers.go:580]     Audit-Id: 94ed0f4f-a761-492d-86a5-1ac15d31e533
	I0916 11:10:08.411501 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:08.411506 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:08.411509 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:08.411512 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:08.411515 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:08 GMT
	I0916 11:10:08.411792 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:08.412371 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:08.412391 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:08.412399 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:08.412402 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:08.414596 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:08.414624 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:08.414632 2188520 round_trippers.go:580]     Audit-Id: 10bfb0ba-8fee-45b9-a7cf-9fb07565b9cf
	I0916 11:10:08.414636 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:08.414639 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:08.414643 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:08.414646 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:08.414651 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:08 GMT
	I0916 11:10:08.414864 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:08.909177 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:08.909202 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:08.909213 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:08.909217 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:08.911694 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:08.911716 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:08.911724 2188520 round_trippers.go:580]     Audit-Id: bf5414b9-9444-48b3-ad42-60f0a10e4cc5
	I0916 11:10:08.911730 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:08.911735 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:08.911747 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:08.911751 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:08.911754 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:08 GMT
	I0916 11:10:08.911889 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:08.912544 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:08.912565 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:08.912574 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:08.912588 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:08.914576 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:08.914605 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:08.914613 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:08.914617 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:08 GMT
	I0916 11:10:08.914621 2188520 round_trippers.go:580]     Audit-Id: 183987dc-d16f-4092-88e5-dfbd082a42ff
	I0916 11:10:08.914624 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:08.914627 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:08.914630 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:08.914844 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:08.915217 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:09.408881 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:09.408906 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:09.408915 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:09.408919 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:09.411309 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:09.411331 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:09.411340 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:09.411344 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:09.411349 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:09.411352 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:09 GMT
	I0916 11:10:09.411356 2188520 round_trippers.go:580]     Audit-Id: 63480c28-ab51-45ce-bc00-8c32b79fef59
	I0916 11:10:09.411358 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:09.411477 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:09.412020 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:09.412037 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:09.412045 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:09.412049 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:09.413932 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:09.413954 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:09.413962 2188520 round_trippers.go:580]     Audit-Id: 832dae23-f0a8-47b4-85c7-c365074fbb57
	I0916 11:10:09.413968 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:09.413971 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:09.413973 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:09.413977 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:09.413980 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:09 GMT
	I0916 11:10:09.414150 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:09.909276 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:09.909312 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:09.909323 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:09.909327 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:09.911816 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:09.911886 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:09.911908 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:09 GMT
	I0916 11:10:09.911924 2188520 round_trippers.go:580]     Audit-Id: 9ba08f43-88a5-4dec-bd32-df35d1ef5e3a
	I0916 11:10:09.911942 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:09.911969 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:09.911990 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:09.912006 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:09.912149 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:09.912729 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:09.912747 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:09.912755 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:09.912760 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:09.914909 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:09.914932 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:09.914941 2188520 round_trippers.go:580]     Audit-Id: 3b387592-a273-4aa1-bb23-cce3c5b76f3d
	I0916 11:10:09.914946 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:09.914949 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:09.914952 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:09.914954 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:09.914957 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:09 GMT
	I0916 11:10:09.915201 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:10.409346 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:10.409373 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:10.409384 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:10.409388 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:10.411753 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:10.411777 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:10.411785 2188520 round_trippers.go:580]     Audit-Id: 6d209dba-4e48-4390-b8e2-db1ed4be2acd
	I0916 11:10:10.411791 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:10.411796 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:10.411800 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:10.411802 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:10.411806 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:10 GMT
	I0916 11:10:10.412079 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:10.412648 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:10.412667 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:10.412677 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:10.412684 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:10.414843 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:10.414866 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:10.414874 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:10 GMT
	I0916 11:10:10.414881 2188520 round_trippers.go:580]     Audit-Id: d5cf9f5c-ce86-4224-a7cb-e1dc8ea20307
	I0916 11:10:10.414885 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:10.414888 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:10.414892 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:10.414895 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:10.415162 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:10.909271 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:10.909296 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:10.909305 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:10.909310 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:10.911825 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:10.911895 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:10.911931 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:10.911954 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:10.911968 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:10.911985 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:10 GMT
	I0916 11:10:10.912014 2188520 round_trippers.go:580]     Audit-Id: e45c9f5e-9b55-43f7-9687-ce9a1be1ae74
	I0916 11:10:10.912033 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:10.912166 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:10.912767 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:10.912783 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:10.912795 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:10.912802 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:10.914902 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:10.914926 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:10.914934 2188520 round_trippers.go:580]     Audit-Id: e204749e-2d25-43d4-83f3-3d2e796c5e91
	I0916 11:10:10.914940 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:10.914945 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:10.914948 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:10.914953 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:10.914957 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:10 GMT
	I0916 11:10:10.915297 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:10.915668 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:11.409574 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:11.409606 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:11.409616 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:11.409622 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:11.412061 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:11.412086 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:11.412094 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:11.412098 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:11 GMT
	I0916 11:10:11.412101 2188520 round_trippers.go:580]     Audit-Id: 74dfae2f-9a3e-4de2-8372-8eedc2de1d7b
	I0916 11:10:11.412104 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:11.412107 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:11.412109 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:11.412432 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:11.412976 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:11.412996 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:11.413006 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:11.413012 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:11.415210 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:11.415236 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:11.415245 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:11.415249 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:11.415252 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:11.415255 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:11 GMT
	I0916 11:10:11.415258 2188520 round_trippers.go:580]     Audit-Id: fd2b5fde-f87c-44be-b215-7ba04b97d9ff
	I0916 11:10:11.415266 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:11.415547 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:11.908698 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:11.908728 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:11.908737 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:11.908743 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:11.911811 2188520 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:11.911876 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:11.911887 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:11.911891 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:11.911896 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:11 GMT
	I0916 11:10:11.911900 2188520 round_trippers.go:580]     Audit-Id: fa801329-a791-4685-a836-5e22dc3c639c
	I0916 11:10:11.911903 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:11.911906 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:11.912339 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:11.912932 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:11.912952 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:11.912966 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:11.912971 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:11.915337 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:11.915359 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:11.915365 2188520 round_trippers.go:580]     Audit-Id: 4e6cdcca-71b2-4ce7-a98b-de4d48d28e99
	I0916 11:10:11.915370 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:11.915373 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:11.915378 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:11.915382 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:11.915385 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:11 GMT
	I0916 11:10:11.915793 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:12.409533 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:12.409558 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:12.409568 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:12.409573 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:12.411893 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:12.411914 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:12.411923 2188520 round_trippers.go:580]     Audit-Id: 63974b93-fedb-4ff5-8e3f-88a041cd4a44
	I0916 11:10:12.411928 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:12.411961 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:12.411972 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:12.411976 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:12.411979 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:12 GMT
	I0916 11:10:12.412260 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:12.412802 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:12.412812 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:12.412821 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:12.412825 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:12.414913 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:12.414945 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:12.414952 2188520 round_trippers.go:580]     Audit-Id: bee04489-f6c3-4a9a-a586-89f6e130abb1
	I0916 11:10:12.414956 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:12.414959 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:12.414961 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:12.414965 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:12.414968 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:12 GMT
	I0916 11:10:12.415140 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:12.908693 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:12.908721 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:12.908731 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:12.908738 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:12.911166 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:12.911195 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:12.911202 2188520 round_trippers.go:580]     Audit-Id: 4c09c869-7a24-425b-b7eb-251cf24e346a
	I0916 11:10:12.911206 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:12.911209 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:12.911212 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:12.911214 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:12.911217 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:12 GMT
	I0916 11:10:12.911588 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:12.912146 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:12.912165 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:12.912174 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:12.912179 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:12.914261 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:12.914279 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:12.914296 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:12.914300 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:12.914304 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:12.914307 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:12 GMT
	I0916 11:10:12.914310 2188520 round_trippers.go:580]     Audit-Id: d876bf36-f92a-485e-8f6d-640515bdfedf
	I0916 11:10:12.914313 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:12.914453 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:13.409634 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:13.409658 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:13.409668 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:13.409673 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:13.412224 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:13.412250 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:13.412259 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:13.412267 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:13.412272 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:13.412275 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:13 GMT
	I0916 11:10:13.412278 2188520 round_trippers.go:580]     Audit-Id: c1266db9-62ee-4b25-866a-802af17d02a1
	I0916 11:10:13.412281 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:13.412506 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:13.413144 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:13.413166 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:13.413176 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:13.413181 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:13.415393 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:13.415446 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:13.415456 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:13.415461 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:13.415466 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:13.415470 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:13 GMT
	I0916 11:10:13.415474 2188520 round_trippers.go:580]     Audit-Id: 8fb17d09-ac78-4c41-8ead-0040184787ff
	I0916 11:10:13.415480 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:13.415760 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:13.416231 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:13.908644 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:13.908670 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:13.908680 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:13.908685 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:13.910991 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:13.911020 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:13.911029 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:13.911035 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:13.911038 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:13.911043 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:13 GMT
	I0916 11:10:13.911046 2188520 round_trippers.go:580]     Audit-Id: 4966f8c1-b51b-4873-946c-14e1a20a7b8b
	I0916 11:10:13.911051 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:13.911223 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:13.911754 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:13.911772 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:13.911780 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:13.911791 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:13.913868 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:13.913935 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:13.914005 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:13.914028 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:13.914047 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:13.914060 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:13 GMT
	I0916 11:10:13.914078 2188520 round_trippers.go:580]     Audit-Id: 49b2d1b5-6108-4fc9-9384-13e61a451d7d
	I0916 11:10:13.914107 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:13.914236 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:14.408659 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:14.408683 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:14.408691 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:14.408695 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:14.410938 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:14.410960 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:14.410968 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:14.410973 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:14.410977 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:14 GMT
	I0916 11:10:14.410980 2188520 round_trippers.go:580]     Audit-Id: b337a254-f812-4aee-b9c9-421b7dc4268a
	I0916 11:10:14.410983 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:14.410986 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:14.411419 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:14.411959 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:14.411978 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:14.411987 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:14.411992 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:14.413954 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:14.413973 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:14.413981 2188520 round_trippers.go:580]     Audit-Id: cc742c9a-53d6-4a69-8673-d8a9d134e42c
	I0916 11:10:14.413985 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:14.413988 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:14.413991 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:14.413995 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:14.413998 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:14 GMT
	I0916 11:10:14.414116 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:14.909319 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:14.909342 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:14.909352 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:14.909356 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:14.911763 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:14.911786 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:14.911795 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:14.911799 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:14.911803 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:14.911806 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:14.911810 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:14 GMT
	I0916 11:10:14.911813 2188520 round_trippers.go:580]     Audit-Id: 3c5a6210-a886-4e9a-b36c-cef60632f03f
	I0916 11:10:14.911926 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:14.912458 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:14.912468 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:14.912476 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:14.912480 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:14.914580 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:14.914636 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:14.914645 2188520 round_trippers.go:580]     Audit-Id: 14651b94-7cf4-415c-aca6-f46c49de77bd
	I0916 11:10:14.914652 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:14.914655 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:14.914659 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:14.914669 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:14.914672 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:14 GMT
	I0916 11:10:14.914812 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:15.409368 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:15.409398 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:15.409409 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.409415 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.411849 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:15.411874 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:15.411883 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:15.411887 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:15.411890 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:15.411892 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:15.411895 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:15 GMT
	I0916 11:10:15.411898 2188520 round_trippers.go:580]     Audit-Id: 92d3689b-195f-4c7b-88b4-eccfa5f7c64f
	I0916 11:10:15.412181 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:15.412749 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:15.412768 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:15.412776 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.412781 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.414635 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:15.414657 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:15.414665 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:15.414671 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:15.414696 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:15.414701 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:15 GMT
	I0916 11:10:15.414705 2188520 round_trippers.go:580]     Audit-Id: 4deb9d0e-0b42-4886-ad07-a0913068bb5d
	I0916 11:10:15.414710 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:15.414951 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:15.908979 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:15.909003 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:15.909013 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.909017 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.913317 2188520 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:10:15.913344 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:15.913354 2188520 round_trippers.go:580]     Audit-Id: 5f8218f7-c400-4dd1-8110-8d0041459f80
	I0916 11:10:15.913359 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:15.913365 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:15.913368 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:15.913372 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:15.913377 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:15 GMT
	I0916 11:10:15.913680 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:15.914251 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:15.914262 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:15.914271 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:15.914275 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:15.916437 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:15.916461 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:15.916470 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:15.916474 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:15.916477 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:15 GMT
	I0916 11:10:15.916480 2188520 round_trippers.go:580]     Audit-Id: 36f29673-e68a-4a56-805e-3874eb0e3634
	I0916 11:10:15.916482 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:15.916485 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:15.916652 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:15.917021 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:16.409293 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:16.409322 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:16.409331 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:16.409347 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:16.411744 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:16.411765 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:16.411775 2188520 round_trippers.go:580]     Audit-Id: ed29a045-03f8-468a-8fdd-523167be8d55
	I0916 11:10:16.411779 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:16.411784 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:16.411787 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:16.411791 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:16.411794 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:16 GMT
	I0916 11:10:16.412023 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:16.412560 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:16.412579 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:16.412587 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:16.412591 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:16.414459 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:16.414482 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:16.414492 2188520 round_trippers.go:580]     Audit-Id: c67d83cb-2b8c-48ef-b484-4d6fa04650bc
	I0916 11:10:16.414498 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:16.414502 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:16.414505 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:16.414508 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:16.414512 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:16 GMT
	I0916 11:10:16.414623 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:16.908716 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:16.908745 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:16.908755 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:16.908759 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:16.911297 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:16.911328 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:16.911337 2188520 round_trippers.go:580]     Audit-Id: 00f354df-8a9a-491b-9652-e6a14117e7dc
	I0916 11:10:16.911342 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:16.911345 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:16.911348 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:16.911351 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:16.911353 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:16 GMT
	I0916 11:10:16.911475 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:16.912021 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:16.912038 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:16.912046 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:16.912052 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:16.914268 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:16.914289 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:16.914297 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:16.914302 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:16.914306 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:16 GMT
	I0916 11:10:16.914309 2188520 round_trippers.go:580]     Audit-Id: 3405c8d8-d10e-4cf8-8b57-2e04025ae5b0
	I0916 11:10:16.914312 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:16.914315 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:16.914425 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:17.408554 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:17.408580 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:17.408590 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:17.408596 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:17.411334 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:17.411383 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:17.411410 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:17.411424 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:17.411433 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:17 GMT
	I0916 11:10:17.411436 2188520 round_trippers.go:580]     Audit-Id: 8bbc8ef4-c34a-4c53-8681-11e97840a2bd
	I0916 11:10:17.411464 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:17.411474 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:17.412226 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:17.412792 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:17.412811 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:17.412820 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:17.412826 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:17.415273 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:17.415301 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:17.415310 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:17.415317 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:17.415320 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:17.415323 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:17.415326 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:17 GMT
	I0916 11:10:17.415329 2188520 round_trippers.go:580]     Audit-Id: 76ea1822-0bde-4430-8be4-06730e23d558
	I0916 11:10:17.415881 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:17.908665 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:17.908692 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:17.908701 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:17.908706 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:17.911121 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:17.911164 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:17.911172 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:17.911176 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:17.911180 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:17.911182 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:17 GMT
	I0916 11:10:17.911185 2188520 round_trippers.go:580]     Audit-Id: 813a46e9-9d07-456b-a013-c17b98ecaf94
	I0916 11:10:17.911188 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:17.911314 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:17.911873 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:17.911892 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:17.911900 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:17.911904 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:17.914167 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:17.914195 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:17.914205 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:17.914211 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:17.914215 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:17 GMT
	I0916 11:10:17.914218 2188520 round_trippers.go:580]     Audit-Id: 5ab5e294-ba02-4ca1-a1f0-c99b7a2394be
	I0916 11:10:17.914222 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:17.914226 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:17.914414 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:18.408653 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:18.408681 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:18.408691 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:18.408697 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:18.411068 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:18.411108 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:18.411116 2188520 round_trippers.go:580]     Audit-Id: 13edc02f-9b3a-44c0-9e70-4f2a5989eb06
	I0916 11:10:18.411121 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:18.411124 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:18.411126 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:18.411130 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:18.411133 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:18 GMT
	I0916 11:10:18.411239 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:18.411753 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:18.411762 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:18.411770 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:18.411774 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:18.413924 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:18.413943 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:18.413951 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:18.413956 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:18 GMT
	I0916 11:10:18.413960 2188520 round_trippers.go:580]     Audit-Id: c789a2ba-2839-487c-98a4-28ade1ba037c
	I0916 11:10:18.413964 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:18.413989 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:18.413994 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:18.414119 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:18.414479 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:18.909561 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:18.909587 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:18.909596 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:18.909602 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:18.911847 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:18.911870 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:18.911878 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:18 GMT
	I0916 11:10:18.911881 2188520 round_trippers.go:580]     Audit-Id: 24b9e7fc-e83b-4286-82e2-d955389b1aea
	I0916 11:10:18.911884 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:18.911887 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:18.911890 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:18.911894 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:18.912009 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:18.912541 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:18.912564 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:18.912576 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:18.912582 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:18.914704 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:18.914730 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:18.914743 2188520 round_trippers.go:580]     Audit-Id: 040168c8-366e-43e8-a445-4a05c3ab8a35
	I0916 11:10:18.914747 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:18.914751 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:18.914755 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:18.914757 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:18.914760 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:18 GMT
	I0916 11:10:18.914880 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:19.408910 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:19.408941 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:19.408952 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:19.408955 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:19.411399 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:19.411432 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:19.411443 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:19.411447 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:19.411452 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:19.411459 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:19 GMT
	I0916 11:10:19.411463 2188520 round_trippers.go:580]     Audit-Id: 858271a7-62d6-41be-9bbf-c00b08864194
	I0916 11:10:19.411466 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:19.411825 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:19.412358 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:19.412368 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:19.412377 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:19.412382 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:19.414229 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:19.414244 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:19.414251 2188520 round_trippers.go:580]     Audit-Id: d9c99a50-ce60-40ea-9b42-27cde56111fb
	I0916 11:10:19.414255 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:19.414259 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:19.414263 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:19.414266 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:19.414269 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:19 GMT
	I0916 11:10:19.414371 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:19.909517 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:19.909543 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:19.909552 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:19.909557 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:19.911826 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:19.911851 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:19.911860 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:19 GMT
	I0916 11:10:19.911867 2188520 round_trippers.go:580]     Audit-Id: 8a383d98-397e-406c-ae45-e61e7dc2ce19
	I0916 11:10:19.911870 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:19.911874 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:19.911877 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:19.911880 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:19.912110 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:19.912641 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:19.912660 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:19.912668 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:19.912673 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:19.914605 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:19.914668 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:19.914716 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:19.914819 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:19.914829 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:19.914834 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:19.914839 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:19 GMT
	I0916 11:10:19.914842 2188520 round_trippers.go:580]     Audit-Id: 00467412-904c-4cf5-a08a-e861a52a5fe0
	I0916 11:10:19.914958 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:20.409585 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:20.409610 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:20.409620 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:20.409626 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:20.412085 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:20.412118 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:20.412127 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:20.412131 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:20 GMT
	I0916 11:10:20.412135 2188520 round_trippers.go:580]     Audit-Id: 9c81386a-3340-492d-b161-5b2629855177
	I0916 11:10:20.412138 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:20.412140 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:20.412143 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:20.412280 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:20.412832 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:20.412851 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:20.412861 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:20.412867 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:20.414955 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:20.415038 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:20.415055 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:20.415060 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:20 GMT
	I0916 11:10:20.415065 2188520 round_trippers.go:580]     Audit-Id: c8a2374b-d21f-4214-931e-1be7a3aa02f0
	I0916 11:10:20.415068 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:20.415071 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:20.415090 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:20.415250 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:20.415646 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:20.908638 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:20.908673 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:20.908682 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:20.908687 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:20.911891 2188520 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:20.911947 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:20.911956 2188520 round_trippers.go:580]     Audit-Id: 6ad56dde-21b2-4ed8-aef2-a5618e5dcc26
	I0916 11:10:20.911961 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:20.911965 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:20.911967 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:20.911970 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:20.911973 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:20 GMT
	I0916 11:10:20.912091 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:20.912623 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:20.912643 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:20.912652 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:20.912656 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:20.914744 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:20.914765 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:20.914774 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:20.914784 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:20.914787 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:20.914791 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:20 GMT
	I0916 11:10:20.914794 2188520 round_trippers.go:580]     Audit-Id: 91edfb77-bc96-4c39-927f-18d6536c0143
	I0916 11:10:20.914796 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:20.914950 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:21.408809 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:21.408833 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:21.408843 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:21.408847 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:21.411356 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:21.411429 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:21.411451 2188520 round_trippers.go:580]     Audit-Id: 6bbc7e26-21a5-486a-bb5e-d5eefef33c9b
	I0916 11:10:21.411471 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:21.411503 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:21.411522 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:21.411526 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:21.411531 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:21 GMT
	I0916 11:10:21.411660 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:21.412231 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:21.412248 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:21.412258 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:21.412261 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:21.414240 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:21.414258 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:21.414266 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:21.414270 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:21 GMT
	I0916 11:10:21.414275 2188520 round_trippers.go:580]     Audit-Id: 3e199787-1f00-4850-a2a7-dae29d324e62
	I0916 11:10:21.414278 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:21.414281 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:21.414284 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:21.414429 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:21.909051 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:21.909077 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:21.909087 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:21.909093 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:21.911346 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:21.911369 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:21.911377 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:21.911383 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:21.911386 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:21.911390 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:21.911393 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:21 GMT
	I0916 11:10:21.911396 2188520 round_trippers.go:580]     Audit-Id: 8ef48bca-4789-4ae0-a154-9dfae1edba4b
	I0916 11:10:21.911803 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:21.912339 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:21.912357 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:21.912365 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:21.912372 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:21.914360 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:21.914376 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:21.914384 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:21.914389 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:21.914394 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:21.914397 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:21.914400 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:21 GMT
	I0916 11:10:21.914404 2188520 round_trippers.go:580]     Audit-Id: 1b1f5bf5-9395-4058-a909-84e561c66693
	I0916 11:10:21.914523 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:22.408675 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:22.408700 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:22.408711 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:22.408716 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:22.411118 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:22.411144 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:22.411153 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:22.411157 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:22.411160 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:22.411163 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:22.411165 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:22 GMT
	I0916 11:10:22.411168 2188520 round_trippers.go:580]     Audit-Id: c2f9f4f6-9b60-44b4-b419-20b14bc67ad4
	I0916 11:10:22.411353 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:22.411896 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:22.411914 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:22.411924 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:22.411929 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:22.414083 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:22.414138 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:22.414147 2188520 round_trippers.go:580]     Audit-Id: c565203e-fa6e-4f76-beac-2b8a0606aace
	I0916 11:10:22.414152 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:22.414155 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:22.414158 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:22.414161 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:22.414163 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:22 GMT
	I0916 11:10:22.414309 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:22.908678 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:22.908705 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:22.908715 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:22.908721 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:22.911164 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:22.911216 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:22.911225 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:22.911230 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:22 GMT
	I0916 11:10:22.911233 2188520 round_trippers.go:580]     Audit-Id: bcefebcb-ca78-470d-8fcd-c79ff2da804f
	I0916 11:10:22.911237 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:22.911240 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:22.911243 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:22.911352 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:22.911876 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:22.911893 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:22.911901 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:22.911906 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:22.913979 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:22.914003 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:22.914011 2188520 round_trippers.go:580]     Audit-Id: 8071e506-0e59-4e66-a92b-807f17848c06
	I0916 11:10:22.914017 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:22.914020 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:22.914023 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:22.914027 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:22.914030 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:22 GMT
	I0916 11:10:22.914140 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:22.914505 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:23.409328 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:23.409353 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:23.409363 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:23.409368 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:23.411667 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:23.411692 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:23.411701 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:23.411707 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:23.411710 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:23 GMT
	I0916 11:10:23.411715 2188520 round_trippers.go:580]     Audit-Id: c7bc6d21-b068-4f1e-a670-46649db5f2b8
	I0916 11:10:23.411718 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:23.411721 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:23.411876 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:23.412415 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:23.412434 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:23.412444 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:23.412455 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:23.414565 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:23.414589 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:23.414596 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:23.414600 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:23.414604 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:23.414608 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:23.414612 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:23 GMT
	I0916 11:10:23.414615 2188520 round_trippers.go:580]     Audit-Id: b07226a4-b8a2-4beb-8b5c-928846c43508
	I0916 11:10:23.414805 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:23.909039 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:23.909065 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:23.909075 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:23.909080 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:23.911542 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:23.911566 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:23.911575 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:23 GMT
	I0916 11:10:23.911580 2188520 round_trippers.go:580]     Audit-Id: 25e0326c-9d66-4bd1-86e2-e45352efd720
	I0916 11:10:23.911584 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:23.911588 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:23.911591 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:23.911594 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:23.911706 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:23.912225 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:23.912234 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:23.912243 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:23.912248 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:23.914086 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:23.914143 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:23.914164 2188520 round_trippers.go:580]     Audit-Id: db8fa257-e42c-48e5-b8f4-3ae185f70891
	I0916 11:10:23.914175 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:23.914178 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:23.914183 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:23.914187 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:23.914189 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:23 GMT
	I0916 11:10:23.914325 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:24.409447 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:24.409476 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:24.409486 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:24.409490 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:24.411908 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:24.411935 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:24.411943 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:24.411948 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:24.411951 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:24 GMT
	I0916 11:10:24.411955 2188520 round_trippers.go:580]     Audit-Id: fc5d8162-0f6a-4540-9003-1e0d07ee1d15
	I0916 11:10:24.411957 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:24.411960 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:24.412076 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:24.412615 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:24.412633 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:24.412642 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:24.412650 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:24.414977 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:24.415005 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:24.415014 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:24.415019 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:24.415024 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:24.415027 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:24.415031 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:24 GMT
	I0916 11:10:24.415034 2188520 round_trippers.go:580]     Audit-Id: a01c60e9-2155-46aa-b3fb-539f05a5f00d
	I0916 11:10:24.415277 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:24.908661 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:24.908691 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:24.908700 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:24.908704 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:24.911177 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:24.911214 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:24.911223 2188520 round_trippers.go:580]     Audit-Id: 9b7ff190-99e1-43ca-b18e-af4af8434a9b
	I0916 11:10:24.911228 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:24.911232 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:24.911236 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:24.911239 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:24.911244 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:24 GMT
	I0916 11:10:24.911490 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:24.912023 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:24.912039 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:24.912049 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:24.912058 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:24.914097 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:24.914120 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:24.914128 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:24.914133 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:24.914137 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:24 GMT
	I0916 11:10:24.914140 2188520 round_trippers.go:580]     Audit-Id: 4f1f50a7-640b-4c5f-895e-f0401d9f265e
	I0916 11:10:24.914143 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:24.914145 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:24.914481 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:24.914883 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:25.409017 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:25.409043 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:25.409053 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:25.409059 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:25.411287 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:25.411313 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:25.411322 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:25.411327 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:25 GMT
	I0916 11:10:25.411332 2188520 round_trippers.go:580]     Audit-Id: 52e86152-1abf-4b69-9a4e-86fd4a74801f
	I0916 11:10:25.411335 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:25.411370 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:25.411379 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:25.411552 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:25.412082 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:25.412100 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:25.412109 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:25.412114 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:25.414065 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:25.414083 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:25.414091 2188520 round_trippers.go:580]     Audit-Id: e24b5952-fb69-4978-9d4a-8a1000b62428
	I0916 11:10:25.414096 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:25.414099 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:25.414104 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:25.414107 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:25.414109 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:25 GMT
	I0916 11:10:25.414246 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:25.909356 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:25.909384 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:25.909394 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:25.909400 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:25.911711 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:25.911734 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:25.911742 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:25.911747 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:25 GMT
	I0916 11:10:25.911751 2188520 round_trippers.go:580]     Audit-Id: 6dbae478-4ce1-402a-bf90-af52f89bd0f0
	I0916 11:10:25.911754 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:25.911757 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:25.911759 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:25.912155 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:25.912701 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:25.912722 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:25.912732 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:25.912739 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:25.914791 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:25.914811 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:25.914819 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:25.914824 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:25.914828 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:25 GMT
	I0916 11:10:25.914831 2188520 round_trippers.go:580]     Audit-Id: c74bc011-e091-40fd-96b0-88a227413370
	I0916 11:10:25.914833 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:25.914836 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:25.915015 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:26.408593 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:26.408621 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:26.408631 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:26.408636 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:26.411121 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:26.411144 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:26.411153 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:26 GMT
	I0916 11:10:26.411157 2188520 round_trippers.go:580]     Audit-Id: 1bbf51b0-3ee8-4e5a-b633-78b586af7719
	I0916 11:10:26.411159 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:26.411162 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:26.411165 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:26.411168 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:26.411276 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:26.411793 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:26.411802 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:26.411810 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:26.411816 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:26.413807 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:26.413826 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:26.413833 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:26.413837 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:26.413841 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:26.413844 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:26.413847 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:26 GMT
	I0916 11:10:26.413849 2188520 round_trippers.go:580]     Audit-Id: e79f93ff-8f05-43c5-958c-7e93ac84cd6c
	I0916 11:10:26.413965 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:26.908696 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:26.908720 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:26.908729 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:26.908734 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:26.910920 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:26.910943 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:26.910952 2188520 round_trippers.go:580]     Audit-Id: b0738a3b-9633-4fea-9d30-9ded7f458a71
	I0916 11:10:26.910961 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:26.910964 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:26.910967 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:26.910970 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:26.910974 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:26 GMT
	I0916 11:10:26.911555 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:26.912095 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:26.912105 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:26.912113 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:26.912117 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:26.914138 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:26.914161 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:26.914177 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:26.914183 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:26.914187 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:26.914190 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:26.914195 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:26 GMT
	I0916 11:10:26.914198 2188520 round_trippers.go:580]     Audit-Id: b711eb0d-cf80-4eb9-bd02-728c2317109e
	I0916 11:10:26.914310 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:27.409437 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:27.409462 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:27.409473 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:27.409478 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:27.412229 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:27.412254 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:27.412263 2188520 round_trippers.go:580]     Audit-Id: d7f4c36b-ab52-4792-a4f1-3e02a81b65c5
	I0916 11:10:27.412267 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:27.412271 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:27.412275 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:27.412279 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:27.412282 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:27 GMT
	I0916 11:10:27.412591 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:27.413126 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:27.413143 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:27.413152 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:27.413159 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:27.415441 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:27.415467 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:27.415476 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:27.415484 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:27 GMT
	I0916 11:10:27.415487 2188520 round_trippers.go:580]     Audit-Id: 60026adf-e120-4ab3-8965-bdb1ee0e83ff
	I0916 11:10:27.415491 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:27.415503 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:27.415513 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:27.415698 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:27.416145 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:27.908831 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:27.908854 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:27.908864 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:27.908873 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:27.911210 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:27.911229 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:27.911237 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:27.911241 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:27.911246 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:27.911249 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:27.911252 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:27 GMT
	I0916 11:10:27.911255 2188520 round_trippers.go:580]     Audit-Id: e1514137-3204-45f2-9f69-789a87dad337
	I0916 11:10:27.911362 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:27.911869 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:27.911878 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:27.911886 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:27.911890 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:27.913748 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:27.913763 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:27.913771 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:27.913775 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:27.913780 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:27.913785 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:27 GMT
	I0916 11:10:27.913788 2188520 round_trippers.go:580]     Audit-Id: 9d551456-0e33-4582-8c50-d8d3273f7ac1
	I0916 11:10:27.913790 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:27.913925 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:28.408626 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:28.408648 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:28.408657 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:28.408662 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:28.411029 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:28.411078 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:28.411086 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:28.411091 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:28 GMT
	I0916 11:10:28.411095 2188520 round_trippers.go:580]     Audit-Id: 998ea2fa-b263-4730-b362-0efac12c5fec
	I0916 11:10:28.411098 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:28.411101 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:28.411104 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:28.411239 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:28.411785 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:28.411800 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:28.411808 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:28.411812 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:28.413720 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:28.413741 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:28.413750 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:28.413755 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:28.413767 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:28.413770 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:28 GMT
	I0916 11:10:28.413773 2188520 round_trippers.go:580]     Audit-Id: 63ee6b07-5394-467d-9fcf-f45480d7f018
	I0916 11:10:28.413775 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:28.413890 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:28.909219 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:28.909245 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:28.909255 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:28.909261 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:28.911579 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:28.911638 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:28.911647 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:28.911652 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:28 GMT
	I0916 11:10:28.911655 2188520 round_trippers.go:580]     Audit-Id: 825c7ca9-2da8-41cd-8561-036bf8a6785a
	I0916 11:10:28.911658 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:28.911661 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:28.911664 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:28.911774 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:28.912313 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:28.912328 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:28.912337 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:28.912342 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:28.914298 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:28.914321 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:28.914336 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:28.914340 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:28 GMT
	I0916 11:10:28.914344 2188520 round_trippers.go:580]     Audit-Id: 2bbe69c3-bfb5-4ebe-92f0-d6f12e8583e8
	I0916 11:10:28.914346 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:28.914349 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:28.914352 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:28.914471 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:29.409483 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:29.409510 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:29.409520 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:29.409524 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:29.411919 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:29.411975 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:29.411984 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:29.411988 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:29.411993 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:29.411997 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:29 GMT
	I0916 11:10:29.412023 2188520 round_trippers.go:580]     Audit-Id: 06205db6-ca87-436e-8c97-6229dc98823f
	I0916 11:10:29.412034 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:29.412150 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:29.412745 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:29.412764 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:29.412773 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:29.412778 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:29.414668 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:29.414719 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:29.414732 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:29.414736 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:29.414739 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:29 GMT
	I0916 11:10:29.414742 2188520 round_trippers.go:580]     Audit-Id: 1b2983ba-194e-48d0-9fe6-8f0e379cdf50
	I0916 11:10:29.414745 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:29.414748 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:29.414864 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:29.908659 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:29.908684 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:29.908694 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:29.908699 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:29.911090 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:29.911155 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:29.911177 2188520 round_trippers.go:580]     Audit-Id: 05f0cd6a-7d3e-461c-a8af-60528489f79d
	I0916 11:10:29.911197 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:29.911213 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:29.911249 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:29.911267 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:29.911283 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:29 GMT
	I0916 11:10:29.911407 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:29.911987 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:29.912031 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:29.912054 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:29.912072 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:29.914064 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:29.914084 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:29.914092 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:29.914096 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:29.914099 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:29.914103 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:29 GMT
	I0916 11:10:29.914105 2188520 round_trippers.go:580]     Audit-Id: 22f80505-dc19-40f2-97c1-c8a1c559b727
	I0916 11:10:29.914108 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:29.914221 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:29.914571 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:30.409319 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:30.409343 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:30.409353 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:30.409357 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:30.411699 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:30.411726 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:30.411735 2188520 round_trippers.go:580]     Audit-Id: 1f6469a5-3d54-45b8-a769-daef79b8932f
	I0916 11:10:30.411740 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:30.411744 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:30.411747 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:30.411750 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:30.411755 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:30 GMT
	I0916 11:10:30.411879 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:30.412415 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:30.412435 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:30.412446 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:30.412450 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:30.414647 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:30.414671 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:30.414701 2188520 round_trippers.go:580]     Audit-Id: 13873a79-79a7-4f0d-8d6e-f002b8c98876
	I0916 11:10:30.414706 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:30.414709 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:30.414711 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:30.414714 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:30.414717 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:30 GMT
	I0916 11:10:30.414938 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:30.908647 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:30.908672 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:30.908681 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:30.908685 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:30.910973 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:30.910994 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:30.911002 2188520 round_trippers.go:580]     Audit-Id: 94a6dbfd-6684-4ba9-9c21-a528bd1b7a94
	I0916 11:10:30.911008 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:30.911011 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:30.911014 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:30.911017 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:30.911019 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:30 GMT
	I0916 11:10:30.911228 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:30.911801 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:30.911819 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:30.911827 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:30.911833 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:30.913785 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:30.913806 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:30.913815 2188520 round_trippers.go:580]     Audit-Id: 737eca57-dcea-4622-91bc-611da1a25fbe
	I0916 11:10:30.913821 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:30.913824 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:30.913827 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:30.913830 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:30.913834 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:30 GMT
	I0916 11:10:30.914096 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:31.408980 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:31.409005 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:31.409016 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:31.409021 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:31.411314 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:31.411337 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:31.411346 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:31 GMT
	I0916 11:10:31.411351 2188520 round_trippers.go:580]     Audit-Id: 25b3e342-71ac-4227-a2f4-9469876b064b
	I0916 11:10:31.411354 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:31.411358 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:31.411361 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:31.411364 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:31.411799 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:31.412334 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:31.412344 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:31.412352 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:31.412357 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:31.414365 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:31.414450 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:31.414467 2188520 round_trippers.go:580]     Audit-Id: e028b22f-242d-42cc-a15e-3289f70e50b9
	I0916 11:10:31.414472 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:31.414476 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:31.414479 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:31.414481 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:31.414501 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:31 GMT
	I0916 11:10:31.414632 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:31.909181 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:31.909208 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:31.909217 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:31.909222 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:31.911546 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:31.911567 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:31.911575 2188520 round_trippers.go:580]     Audit-Id: 315c2fcd-c21e-4e40-b37d-ff40cc68f556
	I0916 11:10:31.911580 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:31.911583 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:31.911585 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:31.911588 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:31.911591 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:31 GMT
	I0916 11:10:31.911683 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:31.912243 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:31.912263 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:31.912271 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:31.912277 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:31.914397 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:31.914466 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:31.914488 2188520 round_trippers.go:580]     Audit-Id: 649e3f04-4bc0-4cd3-b514-b86c6816b5a4
	I0916 11:10:31.914504 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:31.914558 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:31.914568 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:31.914572 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:31.914576 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:31 GMT
	I0916 11:10:31.914699 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:31.915073 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:32.409256 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:32.409281 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:32.409291 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:32.409298 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:32.411818 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:32.411843 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:32.411851 2188520 round_trippers.go:580]     Audit-Id: a7a21571-bcbc-42d4-83fb-3b46767f231f
	I0916 11:10:32.411876 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:32.411882 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:32.411904 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:32.411907 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:32.411910 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:32 GMT
	I0916 11:10:32.412026 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:32.412577 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:32.412596 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:32.412604 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:32.412609 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:32.414918 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:32.414949 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:32.414960 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:32.414966 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:32.414971 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:32.414974 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:32.414977 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:32 GMT
	I0916 11:10:32.414979 2188520 round_trippers.go:580]     Audit-Id: 1847e788-11d7-4613-80e4-a8c30fe73f19
	I0916 11:10:32.415108 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:32.908663 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:32.908695 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:32.908705 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:32.908711 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:32.911258 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:32.911281 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:32.911288 2188520 round_trippers.go:580]     Audit-Id: de3e1eed-8f33-47a2-9357-676dc70a37f7
	I0916 11:10:32.911292 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:32.911295 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:32.911298 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:32.911300 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:32.911303 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:32 GMT
	I0916 11:10:32.911412 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:32.912028 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:32.912047 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:32.912056 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:32.912060 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:32.914190 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:32.914209 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:32.914217 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:32 GMT
	I0916 11:10:32.914221 2188520 round_trippers.go:580]     Audit-Id: 78a98f71-19ed-4423-b1f8-ac2b0182e6c5
	I0916 11:10:32.914224 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:32.914227 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:32.914230 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:32.914232 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:32.914363 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:33.409561 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:33.409587 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:33.409597 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:33.409601 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:33.411929 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:33.411955 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:33.411964 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:33.411969 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:33 GMT
	I0916 11:10:33.411972 2188520 round_trippers.go:580]     Audit-Id: 31fe7b79-2d80-4850-a818-4a585785cc86
	I0916 11:10:33.411975 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:33.411979 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:33.411981 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:33.412362 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:33.412931 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:33.412949 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:33.412959 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:33.412963 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:33.415038 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:33.415061 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:33.415068 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:33 GMT
	I0916 11:10:33.415072 2188520 round_trippers.go:580]     Audit-Id: bef4ec71-23e0-4e51-b39a-390b9865fbbf
	I0916 11:10:33.415076 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:33.415079 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:33.415081 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:33.415084 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:33.415195 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:33.909450 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:33.909480 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:33.909490 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:33.909495 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:33.911975 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:33.911996 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:33.912010 2188520 round_trippers.go:580]     Audit-Id: c2151fd1-40e3-42f8-aeb3-34c07cbcd9b5
	I0916 11:10:33.912014 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:33.912017 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:33.912019 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:33.912022 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:33.912024 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:33 GMT
	I0916 11:10:33.912217 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:33.912785 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:33.912801 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:33.912812 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:33.912820 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:33.915072 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:33.915100 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:33.915111 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:33.915116 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:33.915121 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:33 GMT
	I0916 11:10:33.915125 2188520 round_trippers.go:580]     Audit-Id: 9892528f-bdba-419c-8f37-7dbb65e3024c
	I0916 11:10:33.915129 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:33.915132 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:33.915401 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:33.915780 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:34.409579 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:34.409606 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:34.409617 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:34.409621 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:34.412239 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:34.412316 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:34.412332 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:34 GMT
	I0916 11:10:34.412339 2188520 round_trippers.go:580]     Audit-Id: 6f385a9b-adca-4c07-acab-e77b43fb9361
	I0916 11:10:34.412342 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:34.412346 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:34.412348 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:34.412351 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:34.412562 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:34.413096 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:34.413106 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:34.413114 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:34.413119 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:34.415270 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:34.415290 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:34.415297 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:34.415301 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:34 GMT
	I0916 11:10:34.415305 2188520 round_trippers.go:580]     Audit-Id: 164428d5-9b28-4ef5-9bbf-fdbd5c3ab648
	I0916 11:10:34.415308 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:34.415311 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:34.415314 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:34.415430 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:34.909530 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:34.909554 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:34.909564 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:34.909568 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:34.912005 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:34.912040 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:34.912050 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:34.912054 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:34.912058 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:34 GMT
	I0916 11:10:34.912061 2188520 round_trippers.go:580]     Audit-Id: 55d23501-5137-4267-b79b-39a4b0dbc330
	I0916 11:10:34.912065 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:34.912068 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:34.912551 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:34.913090 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:34.913106 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:34.913116 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:34.913120 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:34.915260 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:34.915289 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:34.915299 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:34 GMT
	I0916 11:10:34.915303 2188520 round_trippers.go:580]     Audit-Id: ebb6071e-7be6-4b5f-adff-122ec516a8da
	I0916 11:10:34.915306 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:34.915309 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:34.915318 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:34.915322 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:34.915450 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:35.408694 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:35.408722 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:35.408738 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:35.408744 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:35.411706 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:35.411862 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:35.411893 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:35.411912 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:35.411929 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:35.411948 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:35 GMT
	I0916 11:10:35.411973 2188520 round_trippers.go:580]     Audit-Id: db5b8c31-056e-42a2-806b-1104ec649c2f
	I0916 11:10:35.411995 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:35.412352 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:35.412882 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:35.412893 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:35.412901 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:35.412907 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:35.416354 2188520 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:35.416376 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:35.416385 2188520 round_trippers.go:580]     Audit-Id: 3416bcc4-793c-42b4-a0b9-885d8ac05c2d
	I0916 11:10:35.416395 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:35.416398 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:35.416401 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:35.416404 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:35.416407 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:35 GMT
	I0916 11:10:35.417490 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:35.908552 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:35.908577 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:35.908586 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:35.908590 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:35.911104 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:35.911138 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:35.911173 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:35.911178 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:35.911181 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:35.911186 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:35 GMT
	I0916 11:10:35.911196 2188520 round_trippers.go:580]     Audit-Id: f5a541d2-9ea1-456d-9523-e5d44e466a35
	I0916 11:10:35.911202 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:35.911307 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:35.911844 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:35.911860 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:35.911868 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:35.911872 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:35.913957 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:35.913980 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:35.914009 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:35.914016 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:35.914020 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:35.914024 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:35.914032 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:35 GMT
	I0916 11:10:35.914035 2188520 round_trippers.go:580]     Audit-Id: 70a047e1-ea1e-4eb6-8e32-a46f0f89b6ea
	I0916 11:10:35.914153 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:36.408640 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:36.408667 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:36.408676 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:36.408680 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:36.411161 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:36.411196 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:36.411204 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:36.411246 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:36.411256 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:36.411260 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:36 GMT
	I0916 11:10:36.411264 2188520 round_trippers.go:580]     Audit-Id: 531898bf-d975-4417-b085-c4a8df96cf9f
	I0916 11:10:36.411267 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:36.411524 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:36.412075 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:36.412094 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:36.412104 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:36.412110 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:36.414151 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:36.414176 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:36.414185 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:36.414189 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:36.414194 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:36 GMT
	I0916 11:10:36.414197 2188520 round_trippers.go:580]     Audit-Id: 660be57a-aa03-4b3f-8909-37f609ac0791
	I0916 11:10:36.414200 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:36.414203 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:36.414317 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:36.414707 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:36.909495 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:36.909520 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:36.909530 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:36.909534 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:36.912033 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:36.912056 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:36.912070 2188520 round_trippers.go:580]     Audit-Id: 5009f2c9-ff30-40fe-a2ad-7f76fe660103
	I0916 11:10:36.912075 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:36.912079 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:36.912082 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:36.912085 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:36.912089 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:36 GMT
	I0916 11:10:36.912430 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:36.912964 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:36.912982 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:36.912991 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:36.912996 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:36.915213 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:36.915237 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:36.915246 2188520 round_trippers.go:580]     Audit-Id: 57e85011-f083-4634-a85d-4c0729101e10
	I0916 11:10:36.915250 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:36.915253 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:36.915258 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:36.915261 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:36.915263 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:36 GMT
	I0916 11:10:36.915446 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:37.409563 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:37.409589 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:37.409598 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:37.409603 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:37.412164 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:37.412190 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:37.412198 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:37.412203 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:37.412207 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:37 GMT
	I0916 11:10:37.412210 2188520 round_trippers.go:580]     Audit-Id: 114e5c90-2881-4078-9366-9e668ace2115
	I0916 11:10:37.412213 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:37.412216 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:37.412433 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:37.412969 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:37.412987 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:37.412996 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:37.413003 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:37.414994 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:37.415021 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:37.415030 2188520 round_trippers.go:580]     Audit-Id: a29e8862-033c-4ad7-b927-9b100601e771
	I0916 11:10:37.415035 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:37.415038 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:37.415041 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:37.415044 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:37.415047 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:37 GMT
	I0916 11:10:37.415162 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:37.909328 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:37.909357 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:37.909368 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:37.909375 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:37.911891 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:37.911917 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:37.911925 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:37.911932 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:37.911935 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:37.911942 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:37.911945 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:37 GMT
	I0916 11:10:37.911948 2188520 round_trippers.go:580]     Audit-Id: c940a6df-0b93-4cb7-a5de-aaadedcec945
	I0916 11:10:37.912118 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:37.912715 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:37.912726 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:37.912735 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:37.912739 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:37.915154 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:37.915177 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:37.915185 2188520 round_trippers.go:580]     Audit-Id: d63900aa-8a4c-4603-801c-f1dcb0e7d47e
	I0916 11:10:37.915190 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:37.915194 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:37.915199 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:37.915202 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:37.915205 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:37 GMT
	I0916 11:10:37.915319 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:38.409256 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:38.409284 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:38.409296 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.409300 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.411798 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:38.411823 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:38.411833 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.411836 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:38.411840 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:38.411843 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.411846 2188520 round_trippers.go:580]     Audit-Id: b026e1c5-eff2-409b-931a-01e956598fb2
	I0916 11:10:38.411848 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.412076 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:38.412613 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:38.412631 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:38.412640 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.412644 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.414814 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:38.414833 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:38.414842 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.414847 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:38.414852 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:38.414855 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.414858 2188520 round_trippers.go:580]     Audit-Id: 576aa639-eea6-42b5-abe6-8aaf282a9111
	I0916 11:10:38.414860 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.415004 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"664","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 4923 chars]
	I0916 11:10:38.415359 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:38.909407 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:10:38.909432 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:38.909441 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.909445 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.912696 2188520 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:38.912719 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:38.912728 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.912734 2188520 round_trippers.go:580]     Audit-Id: 80a0e78f-e20f-48e9-bf09-7e36df424118
	I0916 11:10:38.912737 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.912740 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.912742 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:38.912745 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:38.914350 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:10:38.914969 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:10:38.914990 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:38.914999 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.915009 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.921304 2188520 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 11:10:38.921333 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:38.921342 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:38.921355 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:38.921360 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.921364 2188520 round_trippers.go:580]     Audit-Id: 5dbecc74-0e86-4bc6-a98f-af20b7c0867d
	I0916 11:10:38.921366 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.921369 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.922016 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"802","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5582 chars]
	I0916 11:10:38.922485 2188520 pod_ready.go:98] node "multinode-890146-m03" hosting pod "kube-proxy-vl27g" in "kube-system" namespace is currently not "Ready" (skipping!): node "multinode-890146-m03" has status "Ready":"Unknown"
	I0916 11:10:38.922508 2188520 pod_ready.go:82] duration metric: took 41.514109975s for pod "kube-proxy-vl27g" in "kube-system" namespace to be "Ready" ...
	E0916 11:10:38.922519 2188520 pod_ready.go:67] WaitExtra: waitPodCondition: node "multinode-890146-m03" hosting pod "kube-proxy-vl27g" in "kube-system" namespace is currently not "Ready" (skipping!): node "multinode-890146-m03" has status "Ready":"Unknown"
	I0916 11:10:38.922527 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:38.922599 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:10:38.922609 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:38.922617 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.922622 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.925861 2188520 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:38.925890 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:38.925899 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:38.925904 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:38.925907 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.925910 2188520 round_trippers.go:580]     Audit-Id: c802981b-3ca2-49d5-9181-8d452d43cdd1
	I0916 11:10:38.925912 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.925915 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.926021 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"779","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5188 chars]
	I0916 11:10:38.926513 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:10:38.926529 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:38.926556 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.926561 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.929271 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:38.929329 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:38.929338 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.929344 2188520 round_trippers.go:580]     Audit-Id: ce0863b4-e2f4-4281-901b-6f90d2265255
	I0916 11:10:38.929348 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.929350 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.929355 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:38.929376 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:38.929494 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:10:38.929924 2188520 pod_ready.go:93] pod "kube-scheduler-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:38.929946 2188520 pod_ready.go:82] duration metric: took 7.408158ms for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:38.929958 2188520 pod_ready.go:39] duration metric: took 43.326632669s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:10:38.929983 2188520 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:10:38.930069 2188520 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:10:38.943684 2188520 command_runner.go:130] > 1067
	I0916 11:10:38.945194 2188520 api_server.go:72] duration metric: took 47.623282708s to wait for apiserver process to appear ...
	I0916 11:10:38.945219 2188520 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:10:38.945242 2188520 api_server.go:253] Checking apiserver healthz at https://192.168.58.2:8443/healthz ...
	I0916 11:10:38.953785 2188520 api_server.go:279] https://192.168.58.2:8443/healthz returned 200:
	ok
	I0916 11:10:38.953869 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/version
	I0916 11:10:38.953881 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:38.953891 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.953901 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.954868 2188520 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 11:10:38.954889 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:38.954898 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.954902 2188520 round_trippers.go:580]     Audit-Id: 0dbc0519-a9be-4b23-b52b-330f2bc26217
	I0916 11:10:38.954906 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.954910 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.954914 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:38.954917 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:38.954920 2188520 round_trippers.go:580]     Content-Length: 263
	I0916 11:10:38.955225 2188520 request.go:1351] Response Body: {
	  "major": "1",
	  "minor": "31",
	  "gitVersion": "v1.31.1",
	  "gitCommit": "948afe5ca072329a73c8e79ed5938717a5cb3d21",
	  "gitTreeState": "clean",
	  "buildDate": "2024-09-11T21:22:08Z",
	  "goVersion": "go1.22.6",
	  "compiler": "gc",
	  "platform": "linux/arm64"
	}
	I0916 11:10:38.955327 2188520 api_server.go:141] control plane version: v1.31.1
	I0916 11:10:38.955349 2188520 api_server.go:131] duration metric: took 10.12276ms to wait for apiserver health ...
	I0916 11:10:38.955358 2188520 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:10:38.955424 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:10:38.955435 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:38.955443 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.955449 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.961273 2188520 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:10:38.961304 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:38.961314 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.961319 2188520 round_trippers.go:580]     Audit-Id: 752bc4cb-f97b-495d-8617-62dda3fd54cf
	I0916 11:10:38.961322 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.961324 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.961327 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:38.961330 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:38.961815 2188520 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"815"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"792","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 91555 chars]
	I0916 11:10:38.965719 2188520 system_pods.go:59] 12 kube-system pods found
	I0916 11:10:38.965755 2188520 system_pods.go:61] "coredns-7c65d6cfc9-vp22b" [a6adb735-448b-480b-aba1-3ce4d56c6fc7] Running
	I0916 11:10:38.965764 2188520 system_pods.go:61] "etcd-multinode-890146" [59c960ab-f6dd-4ed3-856c-ba2a02295b12] Running
	I0916 11:10:38.965770 2188520 system_pods.go:61] "kindnet-4sjj6" [0ee7a4e2-91b7-4249-9e02-04f1ae301cce] Running
	I0916 11:10:38.965774 2188520 system_pods.go:61] "kindnet-dbrhk" [24ef6be7-a1ab-41f7-83c8-aa5af5007281] Running
	I0916 11:10:38.965781 2188520 system_pods.go:61] "kindnet-ndgrk" [05cf469d-f130-4d9e-9540-6c8ae5be1e57] Running / Ready:ContainersNotReady (containers with unready status: [kindnet-cni]) / ContainersReady:ContainersNotReady (containers with unready status: [kindnet-cni])
	I0916 11:10:38.965787 2188520 system_pods.go:61] "kube-apiserver-multinode-890146" [9846229d-7227-453f-8c30-697aaba61648] Running
	I0916 11:10:38.965793 2188520 system_pods.go:61] "kube-controller-manager-multinode-890146" [421b15a5-a8e2-4631-bf18-1592c8747b09] Running
	I0916 11:10:38.965803 2188520 system_pods.go:61] "kube-proxy-59f9h" [a9a614fd-3de3-4fa0-b773-5d6a6054d0ea] Running
	I0916 11:10:38.965807 2188520 system_pods.go:61] "kube-proxy-fm5qr" [8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73] Running
	I0916 11:10:38.965814 2188520 system_pods.go:61] "kube-proxy-vl27g" [8f7e9a8c-6e70-4445-b85e-11c5c03701be] Running / Ready:ContainersNotReady (containers with unready status: [kube-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-proxy])
	I0916 11:10:38.965822 2188520 system_pods.go:61] "kube-scheduler-multinode-890146" [6d440ef3-2e6e-4db9-8c28-2417f7a80c9f] Running
	I0916 11:10:38.965828 2188520 system_pods.go:61] "storage-provisioner" [97795413-5c7a-480b-9cbd-18d4dea5669b] Running / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
	I0916 11:10:38.965834 2188520 system_pods.go:74] duration metric: took 10.466823ms to wait for pod list to return data ...
	I0916 11:10:38.965847 2188520 default_sa.go:34] waiting for default service account to be created ...
	I0916 11:10:38.965922 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:10:38.965932 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:38.965940 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.965946 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.970070 2188520 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:10:38.970109 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:38.970119 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.970124 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:38.970127 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:38.970130 2188520 round_trippers.go:580]     Content-Length: 261
	I0916 11:10:38.970133 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.970136 2188520 round_trippers.go:580]     Audit-Id: 6c1afe68-2176-4d14-9e18-56c258894081
	I0916 11:10:38.970140 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.970404 2188520 request.go:1351] Response Body: {"kind":"ServiceAccountList","apiVersion":"v1","metadata":{"resourceVersion":"815"},"items":[{"metadata":{"name":"default","namespace":"default","uid":"05c19a8a-7c83-4ce8-b18d-3bc9431ca644","resourceVersion":"353","creationTimestamp":"2024-09-16T11:07:37Z"}}]}
	I0916 11:10:38.970622 2188520 default_sa.go:45] found service account: "default"
	I0916 11:10:38.970645 2188520 default_sa.go:55] duration metric: took 4.789948ms for default service account to be created ...
	I0916 11:10:38.970655 2188520 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 11:10:38.970797 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:10:38.970811 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:38.970819 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.970824 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.973916 2188520 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:38.973940 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:38.973948 2188520 round_trippers.go:580]     Audit-Id: bd6fe4a4-400d-4f34-aec7-534bf33d64b2
	I0916 11:10:38.973953 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.973955 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.973958 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:38.973961 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:38.973964 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.974879 2188520 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"815"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"792","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 91555 chars]
	I0916 11:10:38.978771 2188520 system_pods.go:86] 12 kube-system pods found
	I0916 11:10:38.978804 2188520 system_pods.go:89] "coredns-7c65d6cfc9-vp22b" [a6adb735-448b-480b-aba1-3ce4d56c6fc7] Running
	I0916 11:10:38.978811 2188520 system_pods.go:89] "etcd-multinode-890146" [59c960ab-f6dd-4ed3-856c-ba2a02295b12] Running
	I0916 11:10:38.978816 2188520 system_pods.go:89] "kindnet-4sjj6" [0ee7a4e2-91b7-4249-9e02-04f1ae301cce] Running
	I0916 11:10:38.978820 2188520 system_pods.go:89] "kindnet-dbrhk" [24ef6be7-a1ab-41f7-83c8-aa5af5007281] Running
	I0916 11:10:38.978829 2188520 system_pods.go:89] "kindnet-ndgrk" [05cf469d-f130-4d9e-9540-6c8ae5be1e57] Running / Ready:ContainersNotReady (containers with unready status: [kindnet-cni]) / ContainersReady:ContainersNotReady (containers with unready status: [kindnet-cni])
	I0916 11:10:38.978834 2188520 system_pods.go:89] "kube-apiserver-multinode-890146" [9846229d-7227-453f-8c30-697aaba61648] Running
	I0916 11:10:38.978839 2188520 system_pods.go:89] "kube-controller-manager-multinode-890146" [421b15a5-a8e2-4631-bf18-1592c8747b09] Running
	I0916 11:10:38.978844 2188520 system_pods.go:89] "kube-proxy-59f9h" [a9a614fd-3de3-4fa0-b773-5d6a6054d0ea] Running
	I0916 11:10:38.978848 2188520 system_pods.go:89] "kube-proxy-fm5qr" [8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73] Running
	I0916 11:10:38.978855 2188520 system_pods.go:89] "kube-proxy-vl27g" [8f7e9a8c-6e70-4445-b85e-11c5c03701be] Running / Ready:ContainersNotReady (containers with unready status: [kube-proxy]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-proxy])
	I0916 11:10:38.978860 2188520 system_pods.go:89] "kube-scheduler-multinode-890146" [6d440ef3-2e6e-4db9-8c28-2417f7a80c9f] Running
	I0916 11:10:38.978867 2188520 system_pods.go:89] "storage-provisioner" [97795413-5c7a-480b-9cbd-18d4dea5669b] Running / Ready:ContainersNotReady (containers with unready status: [storage-provisioner]) / ContainersReady:ContainersNotReady (containers with unready status: [storage-provisioner])
	I0916 11:10:38.978876 2188520 system_pods.go:126] duration metric: took 8.212674ms to wait for k8s-apps to be running ...
	I0916 11:10:38.978888 2188520 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:10:38.978953 2188520 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:10:38.991595 2188520 system_svc.go:56] duration metric: took 12.695825ms WaitForService to wait for kubelet
	I0916 11:10:38.991628 2188520 kubeadm.go:582] duration metric: took 47.669729671s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:10:38.991659 2188520 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:10:38.991746 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes
	I0916 11:10:38.991755 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:38.991764 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:38.991768 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:38.994453 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:38.994480 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:38.994496 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:38.994501 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:38.994504 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:38.994507 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:38.994511 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:38 GMT
	I0916 11:10:38.994514 2188520 round_trippers.go:580]     Audit-Id: 52719c9f-42c3-40b2-bc5a-47346dcb6d82
	I0916 11:10:38.994885 2188520 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"815"},"items":[{"metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"manag
edFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1", [truncated 18668 chars]
	I0916 11:10:38.995936 2188520 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:10:38.995966 2188520 node_conditions.go:123] node cpu capacity is 2
	I0916 11:10:38.995985 2188520 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:10:38.995991 2188520 node_conditions.go:123] node cpu capacity is 2
	I0916 11:10:38.995996 2188520 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:10:38.996004 2188520 node_conditions.go:123] node cpu capacity is 2
	I0916 11:10:38.996009 2188520 node_conditions.go:105] duration metric: took 4.344141ms to run NodePressure ...
	I0916 11:10:38.996025 2188520 start.go:241] waiting for startup goroutines ...
	I0916 11:10:38.996033 2188520 start.go:246] waiting for cluster config update ...
	I0916 11:10:38.996052 2188520 start.go:255] writing updated cluster config ...
	I0916 11:10:38.998504 2188520 out.go:201] 
	I0916 11:10:39.000812 2188520 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:10:39.000946 2188520 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:10:39.008497 2188520 out.go:177] * Starting "multinode-890146-m02" worker node in "multinode-890146" cluster
	I0916 11:10:39.010196 2188520 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 11:10:39.012310 2188520 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:10:39.014364 2188520 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:10:39.014418 2188520 cache.go:56] Caching tarball of preloaded images
	I0916 11:10:39.014466 2188520 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:10:39.014548 2188520 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 11:10:39.014566 2188520 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 11:10:39.014838 2188520 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	W0916 11:10:39.041619 2188520 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:10:39.041645 2188520 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:10:39.041751 2188520 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:10:39.041776 2188520 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:10:39.041782 2188520 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:10:39.041795 2188520 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:10:39.041804 2188520 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:10:39.043386 2188520 image.go:273] response: 
	I0916 11:10:39.172830 2188520 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:10:39.172875 2188520 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:10:39.172907 2188520 start.go:360] acquireMachinesLock for multinode-890146-m02: {Name:mkb193e5e8454b4e97e0a3d9e40e1ee2de147629 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:10:39.172979 2188520 start.go:364] duration metric: took 49.436µs to acquireMachinesLock for "multinode-890146-m02"
	I0916 11:10:39.173007 2188520 start.go:96] Skipping create...Using existing machine configuration
	I0916 11:10:39.173013 2188520 fix.go:54] fixHost starting: m02
	I0916 11:10:39.173314 2188520 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Status}}
	I0916 11:10:39.190267 2188520 fix.go:112] recreateIfNeeded on multinode-890146-m02: state=Stopped err=<nil>
	W0916 11:10:39.190311 2188520 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 11:10:39.192678 2188520 out.go:177] * Restarting existing docker container for "multinode-890146-m02" ...
	I0916 11:10:39.194541 2188520 cli_runner.go:164] Run: docker start multinode-890146-m02
	I0916 11:10:39.499501 2188520 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Status}}
	I0916 11:10:39.518514 2188520 kic.go:430] container "multinode-890146-m02" state is running.
	I0916 11:10:39.518911 2188520 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m02
	I0916 11:10:39.541919 2188520 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:10:39.542157 2188520 machine.go:93] provisionDockerMachine start ...
	I0916 11:10:39.542215 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:10:39.563867 2188520 main.go:141] libmachine: Using SSH client type: native
	I0916 11:10:39.564127 2188520 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40742 <nil> <nil>}
	I0916 11:10:39.564142 2188520 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:10:39.565042 2188520 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 11:10:42.702108 2188520 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146-m02
	
	I0916 11:10:42.702134 2188520 ubuntu.go:169] provisioning hostname "multinode-890146-m02"
	I0916 11:10:42.702197 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:10:42.721118 2188520 main.go:141] libmachine: Using SSH client type: native
	I0916 11:10:42.721370 2188520 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40742 <nil> <nil>}
	I0916 11:10:42.721387 2188520 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-890146-m02 && echo "multinode-890146-m02" | sudo tee /etc/hostname
	I0916 11:10:42.874420 2188520 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146-m02
	
	I0916 11:10:42.874507 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:10:42.892918 2188520 main.go:141] libmachine: Using SSH client type: native
	I0916 11:10:42.893210 2188520 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40742 <nil> <nil>}
	I0916 11:10:42.893235 2188520 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-890146-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-890146-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-890146-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:10:43.030976 2188520 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:10:43.031068 2188520 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 11:10:43.031093 2188520 ubuntu.go:177] setting up certificates
	I0916 11:10:43.031131 2188520 provision.go:84] configureAuth start
	I0916 11:10:43.031207 2188520 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m02
	I0916 11:10:43.053715 2188520 provision.go:143] copyHostCerts
	I0916 11:10:43.053762 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:10:43.053796 2188520 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 11:10:43.053808 2188520 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:10:43.053883 2188520 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 11:10:43.053967 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:10:43.053989 2188520 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 11:10:43.053994 2188520 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:10:43.054032 2188520 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 11:10:43.054083 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:10:43.054106 2188520 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 11:10:43.054113 2188520 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:10:43.054140 2188520 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 11:10:43.054193 2188520 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.multinode-890146-m02 san=[127.0.0.1 192.168.58.3 localhost minikube multinode-890146-m02]
	I0916 11:10:43.697505 2188520 provision.go:177] copyRemoteCerts
	I0916 11:10:43.697584 2188520 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:10:43.697628 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:10:43.715012 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40742 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:10:43.812251 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:10:43.812323 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 11:10:43.838004 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:10:43.838067 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1229 bytes)
	I0916 11:10:43.864160 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:10:43.864228 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0916 11:10:43.891086 2188520 provision.go:87] duration metric: took 859.936832ms to configureAuth
	I0916 11:10:43.891112 2188520 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:10:43.891348 2188520 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:10:43.891356 2188520 machine.go:96] duration metric: took 4.349192271s to provisionDockerMachine
	I0916 11:10:43.891364 2188520 start.go:293] postStartSetup for "multinode-890146-m02" (driver="docker")
	I0916 11:10:43.891374 2188520 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:10:43.891426 2188520 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:10:43.891467 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:10:43.908512 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40742 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:10:44.013659 2188520 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:10:44.017162 2188520 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:10:44.017245 2188520 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:10:44.017268 2188520 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:10:44.017287 2188520 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:10:44.017304 2188520 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:10:44.017336 2188520 command_runner.go:130] > ID=ubuntu
	I0916 11:10:44.017354 2188520 command_runner.go:130] > ID_LIKE=debian
	I0916 11:10:44.017370 2188520 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:10:44.017385 2188520 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:10:44.017414 2188520 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:10:44.017439 2188520 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:10:44.017456 2188520 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:10:44.018113 2188520 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:10:44.018189 2188520 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:10:44.018216 2188520 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:10:44.018250 2188520 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:10:44.018281 2188520 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 11:10:44.018363 2188520 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 11:10:44.018483 2188520 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 11:10:44.018511 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 11:10:44.018661 2188520 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:10:44.028535 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:10:44.056442 2188520 start.go:296] duration metric: took 165.062579ms for postStartSetup
	I0916 11:10:44.056530 2188520 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:10:44.056575 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:10:44.075383 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40742 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:10:44.172137 2188520 command_runner.go:130] > 22%
	I0916 11:10:44.172685 2188520 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:10:44.176967 2188520 command_runner.go:130] > 153G
	I0916 11:10:44.177442 2188520 fix.go:56] duration metric: took 5.004422866s for fixHost
	I0916 11:10:44.177461 2188520 start.go:83] releasing machines lock for "multinode-890146-m02", held for 5.004469602s
	I0916 11:10:44.177540 2188520 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m02
	I0916 11:10:44.197897 2188520 out.go:177] * Found network options:
	I0916 11:10:44.200358 2188520 out.go:177]   - NO_PROXY=192.168.58.2
	W0916 11:10:44.202816 2188520 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:10:44.202861 2188520 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 11:10:44.202936 2188520 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:10:44.202983 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:10:44.203250 2188520 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:10:44.203311 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:10:44.220257 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40742 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:10:44.235695 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40742 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:10:44.436603 2188520 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:10:44.436676 2188520 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 11:10:44.436690 2188520 command_runner.go:130] >   Size: 78        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:10:44.436700 2188520 command_runner.go:130] > Device: d9h/217d	Inode: 1336071     Links: 1
	I0916 11:10:44.436726 2188520 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:10:44.436736 2188520 command_runner.go:130] > Access: 2024-09-16 11:08:06.116096455 +0000
	I0916 11:10:44.436745 2188520 command_runner.go:130] > Modify: 2024-09-16 11:08:05.980097161 +0000
	I0916 11:10:44.436750 2188520 command_runner.go:130] > Change: 2024-09-16 11:08:05.980097161 +0000
	I0916 11:10:44.436755 2188520 command_runner.go:130] >  Birth: 2024-09-16 11:08:05.980097161 +0000
	I0916 11:10:44.436851 2188520 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 11:10:44.455483 2188520 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:10:44.455585 2188520 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:10:44.464860 2188520 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 11:10:44.464884 2188520 start.go:495] detecting cgroup driver to use...
	I0916 11:10:44.464920 2188520 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:10:44.464973 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 11:10:44.478295 2188520 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 11:10:44.490511 2188520 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:10:44.490584 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:10:44.504605 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:10:44.519666 2188520 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:10:44.605066 2188520 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:10:44.698751 2188520 docker.go:233] disabling docker service ...
	I0916 11:10:44.698871 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:10:44.713805 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:10:44.727750 2188520 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:10:44.817873 2188520 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:10:44.911233 2188520 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:10:44.923127 2188520 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:10:44.942711 2188520 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0916 11:10:44.944598 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 11:10:44.955979 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 11:10:44.966973 2188520 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 11:10:44.967084 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 11:10:44.979378 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:10:44.990253 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 11:10:45.001055 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:10:45.029676 2188520 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:10:45.045296 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 11:10:45.061688 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 11:10:45.080476 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 11:10:45.096447 2188520 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:10:45.106974 2188520 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:10:45.109038 2188520 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:10:45.122530 2188520 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:10:45.273898 2188520 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 11:10:45.452064 2188520 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 11:10:45.452175 2188520 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 11:10:45.456097 2188520 command_runner.go:130] >   File: /run/containerd/containerd.sock
	I0916 11:10:45.456122 2188520 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:10:45.456130 2188520 command_runner.go:130] > Device: 10000ah/1048586d	Inode: 169         Links: 1
	I0916 11:10:45.456136 2188520 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:10:45.456142 2188520 command_runner.go:130] > Access: 2024-09-16 11:10:45.383254178 +0000
	I0916 11:10:45.456147 2188520 command_runner.go:130] > Modify: 2024-09-16 11:10:45.383254178 +0000
	I0916 11:10:45.456181 2188520 command_runner.go:130] > Change: 2024-09-16 11:10:45.383254178 +0000
	I0916 11:10:45.456192 2188520 command_runner.go:130] >  Birth: -
	I0916 11:10:45.456777 2188520 start.go:563] Will wait 60s for crictl version
	I0916 11:10:45.456855 2188520 ssh_runner.go:195] Run: which crictl
	I0916 11:10:45.460717 2188520 command_runner.go:130] > /usr/bin/crictl
	I0916 11:10:45.461028 2188520 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:10:45.509840 2188520 command_runner.go:130] > Version:  0.1.0
	I0916 11:10:45.509865 2188520 command_runner.go:130] > RuntimeName:  containerd
	I0916 11:10:45.509872 2188520 command_runner.go:130] > RuntimeVersion:  1.7.22
	I0916 11:10:45.509878 2188520 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:10:45.512785 2188520 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 11:10:45.512885 2188520 ssh_runner.go:195] Run: containerd --version
	I0916 11:10:45.533727 2188520 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:10:45.535701 2188520 ssh_runner.go:195] Run: containerd --version
	I0916 11:10:45.555704 2188520 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:10:45.559878 2188520 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 11:10:45.561639 2188520 out.go:177]   - env NO_PROXY=192.168.58.2
	I0916 11:10:45.563717 2188520 cli_runner.go:164] Run: docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:10:45.583102 2188520 ssh_runner.go:195] Run: grep 192.168.58.1	host.minikube.internal$ /etc/hosts
	I0916 11:10:45.586630 2188520 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.58.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:10:45.597075 2188520 mustload.go:65] Loading cluster: multinode-890146
	I0916 11:10:45.597313 2188520 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:10:45.597594 2188520 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:10:45.613809 2188520 host.go:66] Checking if "multinode-890146" exists ...
	I0916 11:10:45.614096 2188520 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146 for IP: 192.168.58.3
	I0916 11:10:45.614104 2188520 certs.go:194] generating shared ca certs ...
	I0916 11:10:45.614118 2188520 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:10:45.614238 2188520 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 11:10:45.614279 2188520 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 11:10:45.614290 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:10:45.614303 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:10:45.614314 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:10:45.614325 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:10:45.614377 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 11:10:45.614405 2188520 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 11:10:45.614413 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:10:45.614437 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 11:10:45.614458 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:10:45.614478 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 11:10:45.614518 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:10:45.614545 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 11:10:45.614557 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:10:45.614570 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 11:10:45.614587 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:10:45.640550 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 11:10:45.664448 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:10:45.690155 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 11:10:45.714257 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 11:10:45.739357 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:10:45.764175 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 11:10:45.789204 2188520 ssh_runner.go:195] Run: openssl version
	I0916 11:10:45.796000 2188520 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:10:45.796391 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 11:10:45.805533 2188520 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 11:10:45.808844 2188520 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:10:45.808870 2188520 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:10:45.808917 2188520 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 11:10:45.815247 2188520 command_runner.go:130] > 51391683
	I0916 11:10:45.815649 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 11:10:45.824629 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 11:10:45.834494 2188520 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 11:10:45.837834 2188520 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:10:45.838113 2188520 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:10:45.838207 2188520 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 11:10:45.844644 2188520 command_runner.go:130] > 3ec20f2e
	I0916 11:10:45.845136 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:10:45.853968 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:10:45.863920 2188520 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:10:45.867480 2188520 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:10:45.867755 2188520 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:10:45.867819 2188520 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:10:45.874447 2188520 command_runner.go:130] > b5213941
	I0916 11:10:45.875028 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:10:45.884113 2188520 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:10:45.887537 2188520 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:10:45.887622 2188520 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:10:45.887679 2188520 kubeadm.go:934] updating node {m02 192.168.58.3 8443 v1.31.1 containerd false true} ...
	I0916 11:10:45.887770 2188520 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-890146-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.58.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:10:45.887833 2188520 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:10:45.897774 2188520 command_runner.go:130] > kubeadm
	I0916 11:10:45.897796 2188520 command_runner.go:130] > kubectl
	I0916 11:10:45.897802 2188520 command_runner.go:130] > kubelet
	I0916 11:10:45.897820 2188520 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:10:45.897872 2188520 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 11:10:45.906649 2188520 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (324 bytes)
	I0916 11:10:45.926499 2188520 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:10:45.944457 2188520 ssh_runner.go:195] Run: grep 192.168.58.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:10:45.948005 2188520 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.58.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:10:45.959552 2188520 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:10:46.060434 2188520 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:10:46.073987 2188520 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}
	I0916 11:10:46.074429 2188520 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:10:46.077233 2188520 out.go:177] * Verifying Kubernetes components...
	I0916 11:10:46.079688 2188520 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:10:46.174352 2188520 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:10:46.190992 2188520 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:10:46.191317 2188520 kapi.go:59] client config for multinode-890146: &rest.Config{Host:"https://192.168.58.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:10:46.192825 2188520 node_ready.go:35] waiting up to 6m0s for node "multinode-890146-m02" to be "Ready" ...
	I0916 11:10:46.192933 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:46.192945 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:46.192954 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:46.192958 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:46.195488 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:46.195544 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:46.195553 2188520 round_trippers.go:580]     Audit-Id: 4b08f79a-fc75-4d5e-b219-8fa0b89c3a57
	I0916 11:10:46.195559 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:46.195562 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:46.195565 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:46.195575 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:46.195579 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:46 GMT
	I0916 11:10:46.195704 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:46.693206 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:46.693233 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:46.693244 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:46.693249 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:46.695471 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:46.695496 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:46.695505 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:46 GMT
	I0916 11:10:46.695509 2188520 round_trippers.go:580]     Audit-Id: f253a8f7-e14f-40de-a4fd-22a6400ad017
	I0916 11:10:46.695511 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:46.695515 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:46.695517 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:46.695520 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:46.695660 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:47.193825 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:47.193850 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:47.193861 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:47.193867 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:47.196315 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:47.196342 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:47.196352 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:47.196358 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:47.196361 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:47 GMT
	I0916 11:10:47.196364 2188520 round_trippers.go:580]     Audit-Id: 6f1cb358-a698-422e-89e1-a0428d769f7e
	I0916 11:10:47.196366 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:47.196369 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:47.196683 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:47.693435 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:47.693459 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:47.693468 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:47.693472 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:47.695882 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:47.695906 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:47.695915 2188520 round_trippers.go:580]     Audit-Id: 516b3d5f-e461-42be-96d5-154d4fa59640
	I0916 11:10:47.695918 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:47.695921 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:47.695923 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:47.695926 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:47.695929 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:47 GMT
	I0916 11:10:47.696231 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:48.193423 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:48.193448 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:48.193458 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:48.193462 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:48.196675 2188520 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:48.196752 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:48.196774 2188520 round_trippers.go:580]     Audit-Id: e304a5c0-42c2-4c16-8fbe-0734fc7b1e9f
	I0916 11:10:48.196795 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:48.196827 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:48.196848 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:48.196864 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:48.196881 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:48 GMT
	I0916 11:10:48.197040 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:48.197494 2188520 node_ready.go:53] node "multinode-890146-m02" has status "Ready":"Unknown"
	I0916 11:10:48.693717 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:48.693740 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:48.693749 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:48.693754 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:48.696019 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:48.696041 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:48.696051 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:48.696055 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:48.696058 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:48.696060 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:48.696064 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:48 GMT
	I0916 11:10:48.696067 2188520 round_trippers.go:580]     Audit-Id: 9452013c-adcf-4e5d-b375-d79c1bff25ec
	I0916 11:10:48.696678 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:49.193107 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:49.193137 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:49.193147 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:49.193153 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:49.196143 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:49.196165 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:49.196174 2188520 round_trippers.go:580]     Audit-Id: 372db250-9c76-4e4c-8ee1-4957ec4f5898
	I0916 11:10:49.196177 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:49.196180 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:49.196183 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:49.196185 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:49.196188 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:49 GMT
	I0916 11:10:49.196583 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:49.693112 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:49.693137 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:49.693147 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:49.693151 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:49.695526 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:49.695555 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:49.695564 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:49.695568 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:49 GMT
	I0916 11:10:49.695572 2188520 round_trippers.go:580]     Audit-Id: 6bfca655-4122-421a-8ee3-1b83fd5ffd45
	I0916 11:10:49.695574 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:49.695577 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:49.695580 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:49.695956 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:50.193144 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:50.193172 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:50.193233 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:50.193246 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:50.197017 2188520 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:50.197046 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:50.197055 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:50.197062 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:50.197069 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:50 GMT
	I0916 11:10:50.197074 2188520 round_trippers.go:580]     Audit-Id: 5414e465-5026-4380-ad6d-2b24868162c8
	I0916 11:10:50.197077 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:50.197081 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:50.197640 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:50.198042 2188520 node_ready.go:53] node "multinode-890146-m02" has status "Ready":"Unknown"
	I0916 11:10:50.693785 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:50.693806 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:50.693816 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:50.693819 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:50.696038 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:50.696057 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:50.696066 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:50 GMT
	I0916 11:10:50.696070 2188520 round_trippers.go:580]     Audit-Id: 62c11994-3aca-42a1-9e31-bd51d98940f1
	I0916 11:10:50.696072 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:50.696075 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:50.696078 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:50.696080 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:50.696186 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:51.193300 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:51.193330 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:51.193340 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:51.193344 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:51.195981 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:51.196013 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:51.196024 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:51.196028 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:51 GMT
	I0916 11:10:51.196031 2188520 round_trippers.go:580]     Audit-Id: 15c955d9-42eb-4286-b945-010fe8575ba1
	I0916 11:10:51.196035 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:51.196038 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:51.196040 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:51.196155 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:51.693683 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:51.693711 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:51.693721 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:51.693726 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:51.696181 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:51.696208 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:51.696217 2188520 round_trippers.go:580]     Audit-Id: a93194f4-0896-406c-9ad8-aa5c4f5f028f
	I0916 11:10:51.696221 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:51.696225 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:51.696228 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:51.696231 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:51.696234 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:51 GMT
	I0916 11:10:51.696417 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:52.193096 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:52.193120 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:52.193129 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:52.193137 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:52.195297 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:52.195318 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:52.195326 2188520 round_trippers.go:580]     Audit-Id: b089690a-a179-47dd-99ed-e0d40a72102c
	I0916 11:10:52.195330 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:52.195334 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:52.195337 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:52.195339 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:52.195343 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:52 GMT
	I0916 11:10:52.195467 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:52.693536 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:52.693561 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:52.693571 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:52.693577 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:52.695716 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:52.695745 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:52.695753 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:52 GMT
	I0916 11:10:52.695758 2188520 round_trippers.go:580]     Audit-Id: 015b764a-c329-4762-8817-60d501f3091c
	I0916 11:10:52.695761 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:52.695764 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:52.695767 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:52.695769 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:52.695882 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:52.696284 2188520 node_ready.go:53] node "multinode-890146-m02" has status "Ready":"Unknown"
	I0916 11:10:53.194091 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:53.194116 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:53.194136 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:53.194140 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:53.196473 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:53.196497 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:53.196506 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:53.196510 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:53.196515 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:53 GMT
	I0916 11:10:53.196519 2188520 round_trippers.go:580]     Audit-Id: 40975d80-241c-4907-8d34-4b5e77c9c759
	I0916 11:10:53.196522 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:53.196525 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:53.196723 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:53.693208 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:53.693235 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:53.693245 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:53.693250 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:53.695519 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:53.695539 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:53.695548 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:53.695551 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:53.695554 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:53.695557 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:53 GMT
	I0916 11:10:53.695560 2188520 round_trippers.go:580]     Audit-Id: f099ce22-71b7-4454-9296-66b49ff9f44d
	I0916 11:10:53.695563 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:53.695676 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"821","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5829 chars]
	I0916 11:10:54.193950 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:54.193975 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.193984 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.193989 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.196388 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.196411 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.196421 2188520 round_trippers.go:580]     Audit-Id: a1216c30-abd0-4b6b-b6c2-9e81f63dd1b7
	I0916 11:10:54.196428 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.196432 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.196435 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.196439 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.196444 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.196570 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:10:54.196961 2188520 node_ready.go:49] node "multinode-890146-m02" has status "Ready":"True"
	I0916 11:10:54.196973 2188520 node_ready.go:38] duration metric: took 8.004116999s for node "multinode-890146-m02" to be "Ready" ...
	I0916 11:10:54.196984 2188520 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:10:54.197052 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:10:54.197057 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.197065 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.197071 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.200646 2188520 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:10:54.200674 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.200684 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.200689 2188520 round_trippers.go:580]     Audit-Id: e96acc59-df5d-4a5e-98b7-b5216a868b11
	I0916 11:10:54.200692 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.200695 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.200698 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.200701 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.202071 2188520 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"830"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"792","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 91134 chars]
	I0916 11:10:54.206016 2188520 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:54.206105 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:10:54.206112 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.206121 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.206125 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.208523 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.208548 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.208558 2188520 round_trippers.go:580]     Audit-Id: 481a0d13-2170-4c9a-995f-b537d5aa0d4f
	I0916 11:10:54.208564 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.208568 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.208590 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.208598 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.208602 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.208900 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"792","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6693 chars]
	I0916 11:10:54.209486 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:10:54.209505 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.209516 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.209520 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.211796 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.211822 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.211831 2188520 round_trippers.go:580]     Audit-Id: 33f7b542-d1f5-4b9e-b2b7-3ba662a61337
	I0916 11:10:54.211835 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.211840 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.211843 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.211845 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.211849 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.212099 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:10:54.212606 2188520 pod_ready.go:93] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:54.212627 2188520 pod_ready.go:82] duration metric: took 6.584925ms for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:54.212639 2188520 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:54.212737 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-890146
	I0916 11:10:54.212748 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.212757 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.212765 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.215551 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.215577 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.215585 2188520 round_trippers.go:580]     Audit-Id: 8099f08d-3810-4d6f-8eac-f8b50ba2c58f
	I0916 11:10:54.215588 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.215592 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.215595 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.215599 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.215601 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.215734 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-890146","namespace":"kube-system","uid":"59c960ab-f6dd-4ed3-856c-ba2a02295b12","resourceVersion":"781","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.58.2:2379","kubernetes.io/config.hash":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.mirror":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.seen":"2024-09-16T11:07:32.783608135Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-cl
ient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6653 chars]
	I0916 11:10:54.216234 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:10:54.216250 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.216259 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.216265 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.218288 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.218307 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.218316 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.218321 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.218324 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.218328 2188520 round_trippers.go:580]     Audit-Id: eb0b84a4-d532-45f7-89dc-54b0246e55b8
	I0916 11:10:54.218331 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.218333 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.218446 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:10:54.218867 2188520 pod_ready.go:93] pod "etcd-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:54.218883 2188520 pod_ready.go:82] duration metric: took 6.216582ms for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:54.218906 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:54.218972 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-890146
	I0916 11:10:54.218977 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.218986 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.218989 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.221080 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.221103 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.221112 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.221116 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.221119 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.221124 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.221127 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.221130 2188520 round_trippers.go:580]     Audit-Id: 8252a82a-1e3a-426f-bd95-8005be727b13
	I0916 11:10:54.221358 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-890146","namespace":"kube-system","uid":"9846229d-7227-453f-8c30-697aaba61648","resourceVersion":"771","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.58.2:8443","kubernetes.io/config.hash":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.mirror":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.seen":"2024-09-16T11:07:32.783610957Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kube
rnetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 8731 chars]
	I0916 11:10:54.221924 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:10:54.221944 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.221952 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.221958 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.224206 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.224229 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.224238 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.224243 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.224246 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.224249 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.224253 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.224256 2188520 round_trippers.go:580]     Audit-Id: fb96347e-f7f5-4ad3-925d-c5ebe574ec1b
	I0916 11:10:54.224556 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:10:54.224941 2188520 pod_ready.go:93] pod "kube-apiserver-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:54.224953 2188520 pod_ready.go:82] duration metric: took 6.040976ms for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:54.224965 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:54.225029 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-890146
	I0916 11:10:54.225034 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.225042 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.225049 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.227364 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.227429 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.227446 2188520 round_trippers.go:580]     Audit-Id: 0eaa717b-eb52-42c1-bc94-024ead765d8d
	I0916 11:10:54.227451 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.227456 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.227459 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.227462 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.227465 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.227876 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-890146","namespace":"kube-system","uid":"421b15a5-a8e2-4631-bf18-1592c8747b09","resourceVersion":"773","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.mirror":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.seen":"2024-09-16T11:07:32.783594137Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8306 chars]
	I0916 11:10:54.228461 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:10:54.228479 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.228488 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.228493 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.230810 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.230842 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.230850 2188520 round_trippers.go:580]     Audit-Id: 8840f082-f154-4725-837c-441c2b6ce8a8
	I0916 11:10:54.230854 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.230857 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.230862 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.230865 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.230867 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.231030 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:10:54.231482 2188520 pod_ready.go:93] pod "kube-controller-manager-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:10:54.231500 2188520 pod_ready.go:82] duration metric: took 6.527284ms for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:54.231517 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:10:54.394884 2188520 request.go:632] Waited for 163.288294ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:54.394985 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:54.395000 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.395009 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.395030 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.397463 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.397491 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.397501 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.397505 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.397508 2188520 round_trippers.go:580]     Audit-Id: 756b18b3-e008-47bb-99c0-0bec137d3d14
	I0916 11:10:54.397511 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.397514 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.397517 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.397888 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:10:54.594838 2188520 request.go:632] Waited for 196.322429ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:54.594898 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:54.594904 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.594914 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.594922 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.597263 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.597335 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.597357 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.597374 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.597407 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.597413 2188520 round_trippers.go:580]     Audit-Id: c34403a4-72e5-4f2d-af95-1b7ad080f4ac
	I0916 11:10:54.597416 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.597419 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.597566 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:10:54.794163 2188520 request.go:632] Waited for 62.202101ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:54.794227 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:54.794233 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.794249 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.794257 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.796874 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.796942 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.796973 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.796985 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.796991 2188520 round_trippers.go:580]     Audit-Id: 64c7da03-99d2-4c31-a95f-3070c7787705
	I0916 11:10:54.796994 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.796997 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.797002 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.797130 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:10:54.993989 2188520 request.go:632] Waited for 196.27754ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:54.994053 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:54.994059 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:54.994068 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:54.994076 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:54.996574 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:54.996635 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:54.996666 2188520 round_trippers.go:580]     Audit-Id: c3d22619-e073-4f73-89b5-9f0651251553
	I0916 11:10:54.996684 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:54.996726 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:54.996742 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:54.996746 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:54.996750 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:54 GMT
	I0916 11:10:54.996904 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:10:55.232142 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:55.232173 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:55.232182 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:55.232186 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:55.234589 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:55.234668 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:55.234722 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:55.234751 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:55.234771 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:55.234788 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:55 GMT
	I0916 11:10:55.234809 2188520 round_trippers.go:580]     Audit-Id: 879ac870-d88c-4c52-b233-75ffaeb5b32d
	I0916 11:10:55.234826 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:55.234946 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:10:55.394857 2188520 request.go:632] Waited for 159.342058ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:55.394933 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:55.394952 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:55.394961 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:55.394968 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:55.397114 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:55.397209 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:55.397227 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:55.397232 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:55.397250 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:55.397255 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:55 GMT
	I0916 11:10:55.397258 2188520 round_trippers.go:580]     Audit-Id: 210d0701-a9d9-4703-b3ff-ded72f0717be
	I0916 11:10:55.397261 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:55.397400 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:10:55.732580 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:55.732657 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:55.732692 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:55.732708 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:55.735055 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:55.735129 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:55.735146 2188520 round_trippers.go:580]     Audit-Id: 24541db2-8cb1-4182-b469-cb467cf7b497
	I0916 11:10:55.735151 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:55.735154 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:55.735157 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:55.735160 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:55.735163 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:55 GMT
	I0916 11:10:55.735776 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:10:55.794788 2188520 request.go:632] Waited for 58.315477ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:55.794928 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:55.794967 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:55.794986 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:55.794998 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:55.797292 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:55.797312 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:55.797322 2188520 round_trippers.go:580]     Audit-Id: 9671823b-2e56-4783-b584-be074dcf8cdf
	I0916 11:10:55.797357 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:55.797368 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:55.797371 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:55.797376 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:55.797379 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:55 GMT
	I0916 11:10:55.797602 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:10:56.232686 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:56.232712 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:56.232721 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.232724 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.235301 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:56.235328 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:56.235337 2188520 round_trippers.go:580]     Audit-Id: 14491a11-9ff9-4d71-8cef-43c8bb3be496
	I0916 11:10:56.235341 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.235346 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.235351 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:56.235354 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:56.235357 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.235651 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:10:56.237335 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:56.237355 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:56.237364 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.237369 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.239426 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:56.239464 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:56.239473 2188520 round_trippers.go:580]     Audit-Id: e355e30d-d887-4e29-8c72-cca76947797c
	I0916 11:10:56.239477 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.239484 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.239487 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:56.239490 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:56.239493 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.239828 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:10:56.240225 2188520 pod_ready.go:103] pod "kube-proxy-59f9h" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:56.732502 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:56.732526 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:56.732536 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.732539 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.734926 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:56.734964 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:56.734973 2188520 round_trippers.go:580]     Audit-Id: d4985ad9-5245-40c1-8d0c-7f62f0597369
	I0916 11:10:56.734978 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.734982 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.734985 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:56.734988 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:56.734993 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.735183 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:10:56.735725 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:56.735749 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:56.735758 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:56.735764 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:56.737829 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:56.737851 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:56.737860 2188520 round_trippers.go:580]     Audit-Id: 71354c50-6050-4902-94bf-1e9230b2d7eb
	I0916 11:10:56.737867 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:56.737871 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:56.737876 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:56.737879 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:56.737882 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:56 GMT
	I0916 11:10:56.738106 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:10:57.231762 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:57.231790 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:57.231800 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:57.231804 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:57.234182 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:57.234271 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:57.234301 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:57.234318 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:57.234345 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:57.234381 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:57 GMT
	I0916 11:10:57.234398 2188520 round_trippers.go:580]     Audit-Id: 619b4b7e-5fb7-4941-96c5-4156567749a2
	I0916 11:10:57.234420 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:57.234619 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:10:57.235190 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:57.235211 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:57.235219 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:57.235225 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:57.237697 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:57.237718 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:57.237726 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:57.237729 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:57.237732 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:57.237736 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:57.237739 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:57 GMT
	I0916 11:10:57.237742 2188520 round_trippers.go:580]     Audit-Id: 659762ed-cccf-4ae3-8241-e6666ad6e640
	I0916 11:10:57.238125 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:10:57.731744 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:57.731771 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:57.731779 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:57.731784 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:57.734067 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:57.734156 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:57.734179 2188520 round_trippers.go:580]     Audit-Id: 422c7105-48b6-4d42-a8e3-5dc9ef207f98
	I0916 11:10:57.734215 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:57.734237 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:57.734254 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:57.734273 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:57.734301 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:57 GMT
	I0916 11:10:57.734428 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:10:57.735017 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:57.735034 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:57.735042 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:57.735047 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:57.737100 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:57.737155 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:57.737179 2188520 round_trippers.go:580]     Audit-Id: 75f91811-defa-48d9-b57a-10bbc10906dd
	I0916 11:10:57.737197 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:57.737227 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:57.737246 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:57.737262 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:57.737271 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:57 GMT
	I0916 11:10:57.737410 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:10:58.232135 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:58.232160 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:58.232169 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:58.232175 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:58.234526 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:58.234548 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:58.234556 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:58.234561 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:58.234564 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:58.234567 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:58 GMT
	I0916 11:10:58.234569 2188520 round_trippers.go:580]     Audit-Id: dd4ec1b1-bb81-4695-9012-6fdced5cac54
	I0916 11:10:58.234572 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:58.234719 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:10:58.235248 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:58.235268 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:58.235279 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:58.235284 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:58.237241 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:10:58.237260 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:58.237269 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:58.237274 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:58.237278 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:58.237282 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:58 GMT
	I0916 11:10:58.237286 2188520 round_trippers.go:580]     Audit-Id: ff68a4c2-7e32-4cc9-9a85-85f36c19536a
	I0916 11:10:58.237288 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:58.237419 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:10:58.732383 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:58.732408 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:58.732418 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:58.732425 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:58.734708 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:58.734749 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:58.734759 2188520 round_trippers.go:580]     Audit-Id: ca3c458d-84f3-4b7b-8635-7a831b135bed
	I0916 11:10:58.734762 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:58.734765 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:58.734768 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:58.734771 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:58.734774 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:58 GMT
	I0916 11:10:58.735087 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:10:58.735606 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:58.735625 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:58.735634 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:58.735645 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:58.737724 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:58.737741 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:58.737749 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:58.737754 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:58.737759 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:58.737763 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:58 GMT
	I0916 11:10:58.737765 2188520 round_trippers.go:580]     Audit-Id: e7df2e00-9c77-44ce-8ea9-aa09266309ac
	I0916 11:10:58.737768 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:58.737931 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:10:58.738290 2188520 pod_ready.go:103] pod "kube-proxy-59f9h" in "kube-system" namespace has status "Ready":"False"
	I0916 11:10:59.231718 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:59.231742 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:59.231752 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.231756 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.236100 2188520 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:10:59.236177 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:59.236189 2188520 round_trippers.go:580]     Audit-Id: 29676bdd-53d2-44ae-b6a0-7501e17d69b2
	I0916 11:10:59.236195 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.236199 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.236204 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:59.236206 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:59.236212 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.236962 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:10:59.237583 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:59.237601 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:59.237610 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.237616 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.239990 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.240054 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:59.240078 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:59.240098 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.240126 2188520 round_trippers.go:580]     Audit-Id: d044d3fe-0b23-4e27-86cb-7e3b8430f993
	I0916 11:10:59.240147 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.240163 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.240179 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:59.240338 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:10:59.731820 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:10:59.731844 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:59.731854 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.731860 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.734117 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.734142 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:59.734151 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.734156 2188520 round_trippers.go:580]     Audit-Id: 9d2ef566-31b8-4151-b0da-0ce7330c36f7
	I0916 11:10:59.734160 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.734163 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.734167 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:59.734170 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:59.734489 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:10:59.735064 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:10:59.735081 2188520 round_trippers.go:469] Request Headers:
	I0916 11:10:59.735090 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:10:59.735094 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:10:59.737173 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:10:59.737232 2188520 round_trippers.go:577] Response Headers:
	I0916 11:10:59.737254 2188520 round_trippers.go:580]     Audit-Id: 2fc53d1d-9374-469a-846d-b49b924976ba
	I0916 11:10:59.737273 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:10:59.737303 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:10:59.737312 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:10:59.737315 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:10:59.737318 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:10:59 GMT
	I0916 11:10:59.737447 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:11:00.232381 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:11:00.232416 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:00.232428 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:00.232434 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:00.235441 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:00.235463 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:00.235472 2188520 round_trippers.go:580]     Audit-Id: 26a618b7-f49f-47ce-ae9c-570682bc10d3
	I0916 11:11:00.235477 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:00.235480 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:00.235484 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:00.235490 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:00.235493 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:00 GMT
	I0916 11:11:00.236102 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:11:00.236749 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:11:00.236761 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:00.236771 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:00.236775 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:00.269030 2188520 round_trippers.go:574] Response Status: 200 OK in 32 milliseconds
	I0916 11:11:00.269054 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:00.269063 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:00.269068 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:00 GMT
	I0916 11:11:00.269073 2188520 round_trippers.go:580]     Audit-Id: 9a2d5992-8637-4f10-9de1-1938ab54fb71
	I0916 11:11:00.269078 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:00.269082 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:00.269084 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:00.269738 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:11:00.731877 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:11:00.731904 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:00.731913 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:00.731918 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:00.734257 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:00.734329 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:00.734349 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:00 GMT
	I0916 11:11:00.734367 2188520 round_trippers.go:580]     Audit-Id: 70e4e0b9-b222-4544-8528-b3b6d898b348
	I0916 11:11:00.734396 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:00.734445 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:00.734463 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:00.734495 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:00.734617 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:11:00.735253 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:11:00.735271 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:00.735281 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:00.735286 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:00.737464 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:00.737494 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:00.737502 2188520 round_trippers.go:580]     Audit-Id: 0da2021a-ac99-4e12-a006-9ef4c5290297
	I0916 11:11:00.737507 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:00.737511 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:00.737515 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:00.737518 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:00.737520 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:00 GMT
	I0916 11:11:00.737672 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:11:01.231745 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:11:01.231772 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:01.231781 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:01.231786 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:01.234376 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:01.234401 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:01.234410 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:01 GMT
	I0916 11:11:01.234415 2188520 round_trippers.go:580]     Audit-Id: d893e148-e9d0-4da5-9535-78400bcec3a3
	I0916 11:11:01.234418 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:01.234421 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:01.234425 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:01.234450 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:01.234596 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:11:01.235217 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:11:01.235236 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:01.235245 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:01.235250 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:01.237328 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:01.237351 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:01.237359 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:01.237363 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:01.237365 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:01.237369 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:01.237372 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:01 GMT
	I0916 11:11:01.237376 2188520 round_trippers.go:580]     Audit-Id: 18f0141b-1351-425d-b3b3-05398e3ba05a
	I0916 11:11:01.237550 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:11:01.237925 2188520 pod_ready.go:103] pod "kube-proxy-59f9h" in "kube-system" namespace has status "Ready":"False"
	I0916 11:11:01.731784 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:11:01.731810 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:01.731820 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:01.731823 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:01.734122 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:01.734145 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:01.734153 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:01.734157 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:01.734160 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:01.734163 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:01 GMT
	I0916 11:11:01.734166 2188520 round_trippers.go:580]     Audit-Id: 537b2879-42bb-41bb-bb6e-4d058d446139
	I0916 11:11:01.734170 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:01.734488 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:11:01.735064 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:11:01.735081 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:01.735089 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:01.735094 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:01.737097 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:01.737120 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:01.737129 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:01.737135 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:01.737140 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:01.737144 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:01.737148 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:01 GMT
	I0916 11:11:01.737150 2188520 round_trippers.go:580]     Audit-Id: e4d33518-aaec-4bbb-886f-c9c012c890b0
	I0916 11:11:01.737452 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:11:02.232407 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:11:02.232433 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:02.232442 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:02.232447 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:02.234978 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:02.235014 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:02.235024 2188520 round_trippers.go:580]     Audit-Id: 77d96d62-53e1-4bce-9415-341d8a234623
	I0916 11:11:02.235028 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:02.235048 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:02.235061 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:02.235065 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:02.235068 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:02 GMT
	I0916 11:11:02.235218 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:11:02.235790 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:11:02.235807 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:02.235816 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:02.235846 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:02.237827 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:02.237852 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:02.237860 2188520 round_trippers.go:580]     Audit-Id: 14105278-1134-4236-b8b8-6d4a6d60a2da
	I0916 11:11:02.237865 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:02.237868 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:02.237871 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:02.237874 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:02.237887 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:02 GMT
	I0916 11:11:02.238334 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:11:02.731768 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:11:02.731793 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:02.731802 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:02.731805 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:02.734113 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:02.734137 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:02.734146 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:02.734158 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:02.734163 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:02.734167 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:02 GMT
	I0916 11:11:02.734170 2188520 round_trippers.go:580]     Audit-Id: ae374b6a-e784-4a47-b738-ff854d0d4001
	I0916 11:11:02.734173 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:02.734489 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:11:02.735039 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:11:02.735059 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:02.735067 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:02.735071 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:02.737070 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:02.737091 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:02.737098 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:02.737102 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:02.737105 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:02.737137 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:02 GMT
	I0916 11:11:02.737146 2188520 round_trippers.go:580]     Audit-Id: ba81af2c-1277-410f-956e-f2808c46efcb
	I0916 11:11:02.737150 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:02.737262 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:11:03.231794 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:11:03.231822 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:03.231830 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:03.231835 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:03.234143 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:03.234207 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:03.234230 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:03.234245 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:03 GMT
	I0916 11:11:03.234261 2188520 round_trippers.go:580]     Audit-Id: f7c4a24e-dddf-4c23-97da-b370483562f0
	I0916 11:11:03.234286 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:03.234308 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:03.234324 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:03.234756 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:11:03.235306 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:11:03.235324 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:03.235335 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:03.235339 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:03.237323 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:03.237343 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:03.237352 2188520 round_trippers.go:580]     Audit-Id: fe74c30d-b0f3-455a-9c35-4eccf5c9d7da
	I0916 11:11:03.237355 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:03.237358 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:03.237362 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:03.237365 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:03.237369 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:03 GMT
	I0916 11:11:03.237562 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:11:03.237947 2188520 pod_ready.go:103] pod "kube-proxy-59f9h" in "kube-system" namespace has status "Ready":"False"
	I0916 11:11:03.732334 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:11:03.732364 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:03.732375 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:03.732379 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:03.734600 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:03.734620 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:03.734628 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:03.734634 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:03.734638 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:03.734642 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:03 GMT
	I0916 11:11:03.734644 2188520 round_trippers.go:580]     Audit-Id: a122e0cb-8daa-4a42-bb71-2fb32b493952
	I0916 11:11:03.734647 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:03.735085 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"807","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6408 chars]
	I0916 11:11:03.735619 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:11:03.735637 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:03.735647 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:03.735656 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:03.737533 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:03.737584 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:03.737607 2188520 round_trippers.go:580]     Audit-Id: a431b707-dbea-41fd-88da-2f7de08076fc
	I0916 11:11:03.737626 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:03.737643 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:03.737655 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:03.737675 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:03.737681 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:03 GMT
	I0916 11:11:03.737818 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:11:04.232639 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:11:04.232664 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:04.232674 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:04.232679 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:04.235185 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:04.235233 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:04.235243 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:04.235249 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:04 GMT
	I0916 11:11:04.235253 2188520 round_trippers.go:580]     Audit-Id: 6e2087c9-f20e-47ef-b246-da03f0e99c33
	I0916 11:11:04.235257 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:04.235260 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:04.235263 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:04.235374 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"865","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6811 chars]
	I0916 11:11:04.235918 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:11:04.235936 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:04.235945 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:04.235950 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:04.240658 2188520 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:11:04.240686 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:04.240694 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:04.240698 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:04.240701 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:04.240704 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:04 GMT
	I0916 11:11:04.240706 2188520 round_trippers.go:580]     Audit-Id: a8ec88aa-da4d-4058-9d14-ac2d93f771d1
	I0916 11:11:04.240709 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:04.241227 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:11:04.732288 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:11:04.732313 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:04.732322 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:04.732326 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:04.734753 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:04.734780 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:04.734791 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:04.734796 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:04 GMT
	I0916 11:11:04.734801 2188520 round_trippers.go:580]     Audit-Id: dfd5750f-4975-4892-ab1e-7efff6d3b29a
	I0916 11:11:04.734804 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:04.734807 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:04.734809 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:04.735042 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"865","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6811 chars]
	I0916 11:11:04.735573 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:11:04.735590 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:04.735599 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:04.735607 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:04.737461 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:04.737483 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:04.737491 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:04.737494 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:04.737497 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:04.737500 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:04 GMT
	I0916 11:11:04.737502 2188520 round_trippers.go:580]     Audit-Id: dc6739eb-9c46-4041-8e9d-b4a2db4198f5
	I0916 11:11:04.737505 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:04.737603 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:11:05.232726 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:11:05.232750 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:05.232759 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:05.232763 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:05.235243 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:05.235266 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:05.235275 2188520 round_trippers.go:580]     Audit-Id: 8f9420ae-06e9-4850-bf9d-e316ffe44a89
	I0916 11:11:05.235279 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:05.235282 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:05.235285 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:05.235288 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:05.235290 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:05 GMT
	I0916 11:11:05.235677 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"885","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6396 chars]
	I0916 11:11:05.236203 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:11:05.236221 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:05.236230 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:05.236237 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:05.238248 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:05.238270 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:05.238278 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:05.238282 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:05.238284 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:05.238287 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:05 GMT
	I0916 11:11:05.238289 2188520 round_trippers.go:580]     Audit-Id: e741efad-2814-44cf-8d47-2d575cabc7c6
	I0916 11:11:05.238294 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:05.238580 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:11:05.239016 2188520 pod_ready.go:93] pod "kube-proxy-59f9h" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:05.239034 2188520 pod_ready.go:82] duration metric: took 11.007506272s for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:05.239045 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:05.239110 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:11:05.239117 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:05.239125 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:05.239133 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:05.243159 2188520 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:11:05.243183 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:05.243192 2188520 round_trippers.go:580]     Audit-Id: 86e32989-f172-4161-a46d-4db0cf180bc5
	I0916 11:11:05.243196 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:05.243200 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:05.243203 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:05.243206 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:05.243208 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:05 GMT
	I0916 11:11:05.243658 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-fm5qr","generateName":"kube-proxy-","namespace":"kube-system","uid":"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73","resourceVersion":"759","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6388 chars]
	I0916 11:11:05.244176 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:11:05.244195 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:05.244204 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:05.244207 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:05.246189 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:05.246210 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:05.246219 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:05 GMT
	I0916 11:11:05.246226 2188520 round_trippers.go:580]     Audit-Id: 3126262a-6ad2-4467-9cf1-322a176013ed
	I0916 11:11:05.246229 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:05.246233 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:05.246236 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:05.246239 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:05.246346 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:11:05.246772 2188520 pod_ready.go:93] pod "kube-proxy-fm5qr" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:05.246792 2188520 pod_ready.go:82] duration metric: took 7.740488ms for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:05.246802 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-vl27g" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:05.246867 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:05.246877 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:05.246885 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:05.246889 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:05.248889 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:05.248908 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:05.248916 2188520 round_trippers.go:580]     Audit-Id: e98ba813-3959-44f5-b83c-01579728f361
	I0916 11:11:05.248921 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:05.248926 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:05.248929 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:05.248932 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:05.248935 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:05 GMT
	I0916 11:11:05.249189 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:05.249700 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:05.249719 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:05.249728 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:05.249733 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:05.251623 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:05.251645 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:05.251652 2188520 round_trippers.go:580]     Audit-Id: 0577dda2-1e06-4780-83ca-ca44bbc1d730
	I0916 11:11:05.251657 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:05.251660 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:05.251663 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:05.251665 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:05.251668 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:05 GMT
	I0916 11:11:05.251811 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:05.252176 2188520 pod_ready.go:98] node "multinode-890146-m03" hosting pod "kube-proxy-vl27g" in "kube-system" namespace is currently not "Ready" (skipping!): node "multinode-890146-m03" has status "Ready":"Unknown"
	I0916 11:11:05.252196 2188520 pod_ready.go:82] duration metric: took 5.386916ms for pod "kube-proxy-vl27g" in "kube-system" namespace to be "Ready" ...
	E0916 11:11:05.252205 2188520 pod_ready.go:67] WaitExtra: waitPodCondition: node "multinode-890146-m03" hosting pod "kube-proxy-vl27g" in "kube-system" namespace is currently not "Ready" (skipping!): node "multinode-890146-m03" has status "Ready":"Unknown"
	I0916 11:11:05.252214 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:05.252272 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:11:05.252283 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:05.252291 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:05.252298 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:05.254180 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:05.254260 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:05.254270 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:05 GMT
	I0916 11:11:05.254275 2188520 round_trippers.go:580]     Audit-Id: cd8c1128-68e8-428d-9b45-f6fdc0e29d1e
	I0916 11:11:05.254279 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:05.254282 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:05.254284 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:05.254287 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:05.254403 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"779","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5188 chars]
	I0916 11:11:05.254901 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:11:05.254920 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:05.254929 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:05.254933 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:05.256906 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:05.256926 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:05.256933 2188520 round_trippers.go:580]     Audit-Id: e4735885-0a71-4ad8-b76d-d342789a63e7
	I0916 11:11:05.256937 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:05.256939 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:05.256949 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:05.256953 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:05.256955 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:05 GMT
	I0916 11:11:05.257073 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:11:05.257452 2188520 pod_ready.go:93] pod "kube-scheduler-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:05.257470 2188520 pod_ready.go:82] duration metric: took 5.245124ms for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:05.257483 2188520 pod_ready.go:39] duration metric: took 11.06048659s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:11:05.257496 2188520 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:11:05.257550 2188520 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:11:05.270336 2188520 system_svc.go:56] duration metric: took 12.831701ms WaitForService to wait for kubelet
	I0916 11:11:05.270370 2188520 kubeadm.go:582] duration metric: took 19.19629568s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:11:05.270404 2188520 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:11:05.270485 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes
	I0916 11:11:05.270494 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:05.270502 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:05.270507 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:05.273518 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:05.273552 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:05.273562 2188520 round_trippers.go:580]     Audit-Id: 2d1364c2-7f9f-480a-9235-b78d969f4fac
	I0916 11:11:05.273566 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:05.273570 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:05.273574 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:05.273577 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:05.273581 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:05 GMT
	I0916 11:11:05.273943 2188520 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"886"},"items":[{"metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"manag
edFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1", [truncated 18345 chars]
	I0916 11:11:05.275001 2188520 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:11:05.275025 2188520 node_conditions.go:123] node cpu capacity is 2
	I0916 11:11:05.275036 2188520 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:11:05.275041 2188520 node_conditions.go:123] node cpu capacity is 2
	I0916 11:11:05.275045 2188520 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:11:05.275049 2188520 node_conditions.go:123] node cpu capacity is 2
	I0916 11:11:05.275055 2188520 node_conditions.go:105] duration metric: took 4.642716ms to run NodePressure ...
	I0916 11:11:05.275070 2188520 start.go:241] waiting for startup goroutines ...
	I0916 11:11:05.275098 2188520 start.go:255] writing updated cluster config ...
	I0916 11:11:05.277807 2188520 out.go:201] 
	I0916 11:11:05.279818 2188520 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:11:05.279975 2188520 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:11:05.282265 2188520 out.go:177] * Starting "multinode-890146-m03" worker node in "multinode-890146" cluster
	I0916 11:11:05.283898 2188520 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 11:11:05.286188 2188520 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:11:05.288166 2188520 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:11:05.288214 2188520 cache.go:56] Caching tarball of preloaded images
	I0916 11:11:05.288250 2188520 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:11:05.288367 2188520 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 11:11:05.288380 2188520 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 11:11:05.288516 2188520 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	W0916 11:11:05.308033 2188520 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:11:05.308056 2188520 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:11:05.308125 2188520 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:11:05.308147 2188520 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:11:05.308156 2188520 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:11:05.308164 2188520 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:11:05.308172 2188520 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:11:05.309219 2188520 image.go:273] response: 
	I0916 11:11:05.444474 2188520 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:11:05.444518 2188520 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:11:05.444554 2188520 start.go:360] acquireMachinesLock for multinode-890146-m03: {Name:mk4764c4fdb22180b7ea128bd84d3a6de8c41b19 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:11:05.444633 2188520 start.go:364] duration metric: took 55.523µs to acquireMachinesLock for "multinode-890146-m03"
	I0916 11:11:05.444660 2188520 start.go:96] Skipping create...Using existing machine configuration
	I0916 11:11:05.444672 2188520 fix.go:54] fixHost starting: m03
	I0916 11:11:05.444969 2188520 cli_runner.go:164] Run: docker container inspect multinode-890146-m03 --format={{.State.Status}}
	I0916 11:11:05.471297 2188520 fix.go:112] recreateIfNeeded on multinode-890146-m03: state=Stopped err=<nil>
	W0916 11:11:05.471340 2188520 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 11:11:05.473890 2188520 out.go:177] * Restarting existing docker container for "multinode-890146-m03" ...
	I0916 11:11:05.475860 2188520 cli_runner.go:164] Run: docker start multinode-890146-m03
	I0916 11:11:05.776418 2188520 cli_runner.go:164] Run: docker container inspect multinode-890146-m03 --format={{.State.Status}}
	I0916 11:11:05.796636 2188520 kic.go:430] container "multinode-890146-m03" state is running.
	I0916 11:11:05.797007 2188520 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m03
	I0916 11:11:05.822657 2188520 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:11:05.822943 2188520 machine.go:93] provisionDockerMachine start ...
	I0916 11:11:05.823004 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m03
	I0916 11:11:05.844437 2188520 main.go:141] libmachine: Using SSH client type: native
	I0916 11:11:05.844808 2188520 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40747 <nil> <nil>}
	I0916 11:11:05.844821 2188520 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:11:05.845511 2188520 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 11:11:08.986324 2188520 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146-m03
	
	I0916 11:11:08.986349 2188520 ubuntu.go:169] provisioning hostname "multinode-890146-m03"
	I0916 11:11:08.986416 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m03
	I0916 11:11:09.007680 2188520 main.go:141] libmachine: Using SSH client type: native
	I0916 11:11:09.007945 2188520 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40747 <nil> <nil>}
	I0916 11:11:09.007962 2188520 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-890146-m03 && echo "multinode-890146-m03" | sudo tee /etc/hostname
	I0916 11:11:09.168189 2188520 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146-m03
	
	I0916 11:11:09.168270 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m03
	I0916 11:11:09.188237 2188520 main.go:141] libmachine: Using SSH client type: native
	I0916 11:11:09.188477 2188520 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40747 <nil> <nil>}
	I0916 11:11:09.188499 2188520 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-890146-m03' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-890146-m03/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-890146-m03' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:11:09.327599 2188520 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:11:09.327642 2188520 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 11:11:09.327659 2188520 ubuntu.go:177] setting up certificates
	I0916 11:11:09.327674 2188520 provision.go:84] configureAuth start
	I0916 11:11:09.327754 2188520 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m03
	I0916 11:11:09.347874 2188520 provision.go:143] copyHostCerts
	I0916 11:11:09.347917 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:11:09.347951 2188520 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 11:11:09.347962 2188520 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:11:09.348044 2188520 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 11:11:09.348138 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:11:09.348163 2188520 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 11:11:09.348168 2188520 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:11:09.348194 2188520 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 11:11:09.348237 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:11:09.348257 2188520 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 11:11:09.348266 2188520 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:11:09.348292 2188520 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 11:11:09.348341 2188520 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.multinode-890146-m03 san=[127.0.0.1 192.168.58.4 localhost minikube multinode-890146-m03]
	I0916 11:11:09.991676 2188520 provision.go:177] copyRemoteCerts
	I0916 11:11:09.991748 2188520 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:11:09.991795 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m03
	I0916 11:11:10.060012 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40747 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m03/id_rsa Username:docker}
	I0916 11:11:10.166669 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:11:10.166828 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 11:11:10.196877 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:11:10.196980 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1229 bytes)
	I0916 11:11:10.223332 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:11:10.223400 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 11:11:10.251910 2188520 provision.go:87] duration metric: took 924.213944ms to configureAuth
	I0916 11:11:10.251935 2188520 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:11:10.252185 2188520 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:11:10.252193 2188520 machine.go:96] duration metric: took 4.429241923s to provisionDockerMachine
	I0916 11:11:10.252202 2188520 start.go:293] postStartSetup for "multinode-890146-m03" (driver="docker")
	I0916 11:11:10.252212 2188520 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:11:10.252260 2188520 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:11:10.252301 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m03
	I0916 11:11:10.271788 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40747 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m03/id_rsa Username:docker}
	I0916 11:11:10.369137 2188520 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:11:10.372537 2188520 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:11:10.372555 2188520 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:11:10.372561 2188520 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:11:10.372567 2188520 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:11:10.372572 2188520 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:11:10.372575 2188520 command_runner.go:130] > ID=ubuntu
	I0916 11:11:10.372579 2188520 command_runner.go:130] > ID_LIKE=debian
	I0916 11:11:10.372583 2188520 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:11:10.372588 2188520 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:11:10.372594 2188520 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:11:10.372601 2188520 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:11:10.372605 2188520 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:11:10.372651 2188520 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:11:10.372676 2188520 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:11:10.372686 2188520 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:11:10.372693 2188520 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:11:10.372704 2188520 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 11:11:10.372767 2188520 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 11:11:10.372847 2188520 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 11:11:10.372856 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 11:11:10.372961 2188520 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:11:10.382136 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:11:10.408071 2188520 start.go:296] duration metric: took 155.853526ms for postStartSetup
	I0916 11:11:10.408234 2188520 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:11:10.408306 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m03
	I0916 11:11:10.428724 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40747 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m03/id_rsa Username:docker}
	I0916 11:11:10.523881 2188520 command_runner.go:130] > 22%
	I0916 11:11:10.523956 2188520 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:11:10.528499 2188520 command_runner.go:130] > 153G
	I0916 11:11:10.528529 2188520 fix.go:56] duration metric: took 5.083854954s for fixHost
	I0916 11:11:10.528540 2188520 start.go:83] releasing machines lock for "multinode-890146-m03", held for 5.083894724s
	I0916 11:11:10.528609 2188520 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m03
	I0916 11:11:10.549411 2188520 out.go:177] * Found network options:
	I0916 11:11:10.551633 2188520 out.go:177]   - NO_PROXY=192.168.58.2,192.168.58.3
	W0916 11:11:10.553809 2188520 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:11:10.553851 2188520 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:11:10.553877 2188520 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:11:10.553888 2188520 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 11:11:10.553964 2188520 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:11:10.554004 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m03
	I0916 11:11:10.554188 2188520 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:11:10.554245 2188520 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m03
	I0916 11:11:10.582478 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40747 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m03/id_rsa Username:docker}
	I0916 11:11:10.586024 2188520 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40747 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m03/id_rsa Username:docker}
	I0916 11:11:10.798994 2188520 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:11:10.802158 2188520 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 11:11:10.802178 2188520 command_runner.go:130] >   Size: 78        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:11:10.802187 2188520 command_runner.go:130] > Device: 100055h/1048661d	Inode: 1336159     Links: 1
	I0916 11:11:10.802194 2188520 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:11:10.802200 2188520 command_runner.go:130] > Access: 2024-09-16 11:11:06.355141155 +0000
	I0916 11:11:10.802206 2188520 command_runner.go:130] > Modify: 2024-09-16 11:09:11.663753506 +0000
	I0916 11:11:10.802211 2188520 command_runner.go:130] > Change: 2024-09-16 11:09:11.663753506 +0000
	I0916 11:11:10.802216 2188520 command_runner.go:130] >  Birth: 2024-09-16 11:09:11.663753506 +0000
	I0916 11:11:10.802288 2188520 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 11:11:10.820489 2188520 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:11:10.820582 2188520 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:11:10.829843 2188520 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 11:11:10.829913 2188520 start.go:495] detecting cgroup driver to use...
	I0916 11:11:10.829996 2188520 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:11:10.830056 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 11:11:10.842870 2188520 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 11:11:10.856053 2188520 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:11:10.856171 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:11:10.871354 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:11:10.884124 2188520 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:11:10.986652 2188520 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:11:11.085534 2188520 docker.go:233] disabling docker service ...
	I0916 11:11:11.085611 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:11:11.100247 2188520 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:11:11.115881 2188520 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:11:11.232805 2188520 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:11:11.329265 2188520 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:11:11.341720 2188520 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:11:11.358792 2188520 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0916 11:11:11.360338 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 11:11:11.370644 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 11:11:11.381113 2188520 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 11:11:11.381186 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 11:11:11.392249 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:11:11.402745 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 11:11:11.413584 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:11:11.424638 2188520 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:11:11.434787 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 11:11:11.445761 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 11:11:11.457946 2188520 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 11:11:11.469323 2188520 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:11:11.476903 2188520 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:11:11.478028 2188520 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:11:11.495563 2188520 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:11:11.588188 2188520 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 11:11:11.739806 2188520 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 11:11:11.739878 2188520 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 11:11:11.743321 2188520 command_runner.go:130] >   File: /run/containerd/containerd.sock
	I0916 11:11:11.743350 2188520 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:11:11.743358 2188520 command_runner.go:130] > Device: 10005eh/1048670d	Inode: 169         Links: 1
	I0916 11:11:11.743376 2188520 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:11:11.743383 2188520 command_runner.go:130] > Access: 2024-09-16 11:11:11.679112394 +0000
	I0916 11:11:11.743392 2188520 command_runner.go:130] > Modify: 2024-09-16 11:11:11.679112394 +0000
	I0916 11:11:11.743397 2188520 command_runner.go:130] > Change: 2024-09-16 11:11:11.679112394 +0000
	I0916 11:11:11.743401 2188520 command_runner.go:130] >  Birth: -
	I0916 11:11:11.744200 2188520 start.go:563] Will wait 60s for crictl version
	I0916 11:11:11.744273 2188520 ssh_runner.go:195] Run: which crictl
	I0916 11:11:11.749849 2188520 command_runner.go:130] > /usr/bin/crictl
	I0916 11:11:11.749957 2188520 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:11:11.805929 2188520 command_runner.go:130] > Version:  0.1.0
	I0916 11:11:11.806004 2188520 command_runner.go:130] > RuntimeName:  containerd
	I0916 11:11:11.806025 2188520 command_runner.go:130] > RuntimeVersion:  1.7.22
	I0916 11:11:11.806041 2188520 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:11:11.806092 2188520 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 11:11:11.806180 2188520 ssh_runner.go:195] Run: containerd --version
	I0916 11:11:11.827441 2188520 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:11:11.829641 2188520 ssh_runner.go:195] Run: containerd --version
	I0916 11:11:11.851713 2188520 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:11:11.857757 2188520 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 11:11:11.859557 2188520 out.go:177]   - env NO_PROXY=192.168.58.2
	I0916 11:11:11.861730 2188520 out.go:177]   - env NO_PROXY=192.168.58.2,192.168.58.3
	I0916 11:11:11.863707 2188520 cli_runner.go:164] Run: docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:11:11.879412 2188520 ssh_runner.go:195] Run: grep 192.168.58.1	host.minikube.internal$ /etc/hosts
	I0916 11:11:11.883128 2188520 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.58.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:11:11.894766 2188520 mustload.go:65] Loading cluster: multinode-890146
	I0916 11:11:11.895032 2188520 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:11:11.895294 2188520 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:11:11.911996 2188520 host.go:66] Checking if "multinode-890146" exists ...
	I0916 11:11:11.912281 2188520 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146 for IP: 192.168.58.4
	I0916 11:11:11.912296 2188520 certs.go:194] generating shared ca certs ...
	I0916 11:11:11.912310 2188520 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:11:11.912422 2188520 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 11:11:11.912470 2188520 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 11:11:11.912484 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:11:11.912499 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:11:11.912519 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:11:11.912534 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:11:11.912592 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 11:11:11.912625 2188520 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 11:11:11.912637 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:11:11.912662 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 11:11:11.912689 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:11:11.912714 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 11:11:11.912761 2188520 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:11:11.912798 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:11.912816 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 11:11:11.912828 2188520 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 11:11:11.912853 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:11:11.938948 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 11:11:11.972671 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:11:11.999274 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 11:11:12.039186 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:11:12.067424 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 11:11:12.097519 2188520 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 11:11:12.125143 2188520 ssh_runner.go:195] Run: openssl version
	I0916 11:11:12.130869 2188520 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:11:12.131277 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 11:11:12.141677 2188520 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 11:11:12.145572 2188520 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:11:12.145621 2188520 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:11:12.145677 2188520 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 11:11:12.152920 2188520 command_runner.go:130] > 3ec20f2e
	I0916 11:11:12.153652 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:11:12.164854 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:11:12.175625 2188520 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:12.179272 2188520 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:12.179722 2188520 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:12.179786 2188520 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:11:12.188007 2188520 command_runner.go:130] > b5213941
	I0916 11:11:12.188407 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:11:12.197822 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 11:11:12.207942 2188520 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 11:11:12.211423 2188520 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:11:12.211667 2188520 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:11:12.211724 2188520 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 11:11:12.218982 2188520 command_runner.go:130] > 51391683
	I0916 11:11:12.219507 2188520 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 11:11:12.229233 2188520 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:11:12.232655 2188520 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:11:12.232694 2188520 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:11:12.232725 2188520 kubeadm.go:934] updating node {m03 192.168.58.4 0 v1.31.1  false true} ...
	I0916 11:11:12.232821 2188520 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-890146-m03 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.58.4
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:11:12.232916 2188520 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:11:12.240877 2188520 command_runner.go:130] > kubeadm
	I0916 11:11:12.240899 2188520 command_runner.go:130] > kubectl
	I0916 11:11:12.240904 2188520 command_runner.go:130] > kubelet
	I0916 11:11:12.242192 2188520 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:11:12.242279 2188520 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 11:11:12.251396 2188520 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (324 bytes)
	I0916 11:11:12.277743 2188520 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:11:12.297868 2188520 ssh_runner.go:195] Run: grep 192.168.58.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:11:12.301297 2188520 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.58.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:11:12.312313 2188520 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:11:12.404028 2188520 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:11:12.416373 2188520 start.go:235] Will wait 6m0s for node &{Name:m03 IP:192.168.58.4 Port:0 KubernetesVersion:v1.31.1 ContainerRuntime: ControlPlane:false Worker:true}
	I0916 11:11:12.416829 2188520 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:11:12.420596 2188520 out.go:177] * Verifying Kubernetes components...
	I0916 11:11:12.422312 2188520 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:11:12.503034 2188520 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:11:12.515469 2188520 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:11:12.515738 2188520 kapi.go:59] client config for multinode-890146: &rest.Config{Host:"https://192.168.58.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:11:12.516011 2188520 node_ready.go:35] waiting up to 6m0s for node "multinode-890146-m03" to be "Ready" ...
	I0916 11:11:12.516105 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:12.516116 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:12.516125 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:12.516130 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:12.518489 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:12.518512 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:12.518521 2188520 round_trippers.go:580]     Audit-Id: b004e76b-b117-4108-b4f4-13f809de417c
	I0916 11:11:12.518526 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:12.518530 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:12.518533 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:12.518536 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:12.518539 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:12 GMT
	I0916 11:11:12.519091 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:13.016853 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:13.016884 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:13.016894 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:13.016899 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:13.019465 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:13.019538 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:13.019560 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:13 GMT
	I0916 11:11:13.019580 2188520 round_trippers.go:580]     Audit-Id: 08ff096c-189c-4641-a2d1-e10b1e7464c0
	I0916 11:11:13.019610 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:13.019633 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:13.019649 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:13.019664 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:13.019842 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:13.517015 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:13.517042 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:13.517052 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:13.517060 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:13.519590 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:13.519688 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:13.519712 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:13.519729 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:13.519762 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:13 GMT
	I0916 11:11:13.519772 2188520 round_trippers.go:580]     Audit-Id: 38e4ebd5-9fee-4946-861a-4c84bb08f35e
	I0916 11:11:13.519775 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:13.519778 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:13.519946 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:14.017114 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:14.017142 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:14.017153 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:14.017158 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:14.019522 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:14.019551 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:14.019559 2188520 round_trippers.go:580]     Audit-Id: 63168e79-5622-485f-aedf-fe15ad638501
	I0916 11:11:14.019584 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:14.019588 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:14.019591 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:14.019594 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:14.019598 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:14 GMT
	I0916 11:11:14.019784 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:14.516290 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:14.516319 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:14.516329 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:14.516333 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:14.518600 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:14.518624 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:14.518633 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:14.518639 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:14.518642 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:14 GMT
	I0916 11:11:14.518645 2188520 round_trippers.go:580]     Audit-Id: 7e2077fe-4881-43bb-98ad-b2fe879722ef
	I0916 11:11:14.518649 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:14.518653 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:14.518900 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:14.519342 2188520 node_ready.go:53] node "multinode-890146-m03" has status "Ready":"Unknown"
	I0916 11:11:15.017638 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:15.017722 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:15.017749 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:15.017769 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:15.038426 2188520 round_trippers.go:574] Response Status: 200 OK in 20 milliseconds
	I0916 11:11:15.038515 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:15.038541 2188520 round_trippers.go:580]     Audit-Id: 7a128a14-c56c-4511-bf5e-6de850145a63
	I0916 11:11:15.038559 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:15.038591 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:15.038614 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:15.038630 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:15.038648 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:15 GMT
	I0916 11:11:15.039453 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:15.516267 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:15.516297 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:15.516308 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:15.516312 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:15.518457 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:15.518518 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:15.518539 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:15.518558 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:15.518572 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:15.518600 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:15.518622 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:15 GMT
	I0916 11:11:15.518639 2188520 round_trippers.go:580]     Audit-Id: dbe2f8f5-6dbc-4841-8a00-5ca979c181d8
	I0916 11:11:15.518848 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:16.016264 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:16.016289 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:16.016299 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:16.016303 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:16.018630 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:16.018655 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:16.018664 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:16.018669 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:16.018705 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:16.018713 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:16 GMT
	I0916 11:11:16.018716 2188520 round_trippers.go:580]     Audit-Id: e04ddfc6-94c1-48dd-b83f-0e66ebe5f047
	I0916 11:11:16.018719 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:16.018881 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:16.517111 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:16.517136 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:16.517149 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:16.517154 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:16.519624 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:16.519693 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:16.519702 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:16 GMT
	I0916 11:11:16.519705 2188520 round_trippers.go:580]     Audit-Id: 92f7e26d-271b-410b-904d-d8b98ea9d3eb
	I0916 11:11:16.519709 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:16.519713 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:16.519718 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:16.519722 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:16.519851 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:16.520278 2188520 node_ready.go:53] node "multinode-890146-m03" has status "Ready":"Unknown"
	I0916 11:11:17.016345 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:17.016374 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:17.016384 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:17.016390 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:17.019038 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:17.019125 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:17.019169 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:17 GMT
	I0916 11:11:17.019222 2188520 round_trippers.go:580]     Audit-Id: d5b86f82-6a1b-4a7d-89d8-a41b72bcae19
	I0916 11:11:17.019457 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:17.019467 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:17.019471 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:17.019474 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:17.019700 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:17.516505 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:17.516528 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:17.516538 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:17.516541 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:17.518823 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:17.518894 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:17.518909 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:17.518914 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:17.518918 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:17.518921 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:17.518924 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:17 GMT
	I0916 11:11:17.518926 2188520 round_trippers.go:580]     Audit-Id: 4b07f96e-60ae-472f-9fb8-46ed5eca2af4
	I0916 11:11:17.519774 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:18.016972 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:18.017005 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:18.017015 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:18.017024 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:18.019546 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:18.019572 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:18.019581 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:18.019587 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:18.019592 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:18.019597 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:18 GMT
	I0916 11:11:18.019601 2188520 round_trippers.go:580]     Audit-Id: 0b9dd6eb-ff02-4e43-8f2b-80b02bda0b35
	I0916 11:11:18.019604 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:18.020161 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:18.516377 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:18.516404 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:18.516414 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:18.516419 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:18.518847 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:18.518869 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:18.518877 2188520 round_trippers.go:580]     Audit-Id: c93ee637-9534-4e51-aa1f-4287e20d5f7d
	I0916 11:11:18.518882 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:18.518885 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:18.518888 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:18.518891 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:18.518893 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:18 GMT
	I0916 11:11:18.519012 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:19.016275 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:19.016303 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:19.016313 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:19.016318 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:19.018578 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:19.018603 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:19.018611 2188520 round_trippers.go:580]     Audit-Id: 47094c77-20d1-492c-8393-935d71be5dcd
	I0916 11:11:19.018615 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:19.018619 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:19.018622 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:19.018625 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:19.018628 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:19 GMT
	I0916 11:11:19.018774 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:19.019163 2188520 node_ready.go:53] node "multinode-890146-m03" has status "Ready":"Unknown"
	I0916 11:11:19.516759 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:19.516784 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:19.516793 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:19.516799 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:19.519198 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:19.519217 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:19.519225 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:19.519229 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:19.519232 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:19.519235 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:19 GMT
	I0916 11:11:19.519238 2188520 round_trippers.go:580]     Audit-Id: cff636bd-f904-42b9-a95f-6831bd2d0627
	I0916 11:11:19.519240 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:19.519507 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"830","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5679 chars]
	I0916 11:11:20.016259 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:20.016284 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.016293 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.016297 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.022310 2188520 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:11:20.022338 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.022347 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.022351 2188520 round_trippers.go:580]     Audit-Id: 008cb872-f850-4632-9126-a3da0ca08ef7
	I0916 11:11:20.022356 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.022359 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.022363 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.022366 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.023484 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"904","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5252 chars]
	I0916 11:11:20.023893 2188520 node_ready.go:49] node "multinode-890146-m03" has status "Ready":"True"
	I0916 11:11:20.023907 2188520 node_ready.go:38] duration metric: took 7.507878215s for node "multinode-890146-m03" to be "Ready" ...
	I0916 11:11:20.023918 2188520 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:11:20.023993 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:11:20.023999 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.024007 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.024013 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.037241 2188520 round_trippers.go:574] Response Status: 200 OK in 13 milliseconds
	I0916 11:11:20.037266 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.037276 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.037282 2188520 round_trippers.go:580]     Audit-Id: 2982641d-7e11-42c3-8870-6095c777f6c5
	I0916 11:11:20.037285 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.037288 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.037291 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.037294 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.038193 2188520 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"904"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"792","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 91110 chars]
	I0916 11:11:20.042056 2188520 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:20.042152 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:11:20.042160 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.042169 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.042173 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.047797 2188520 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:11:20.047882 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.047919 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.047928 2188520 round_trippers.go:580]     Audit-Id: 85f20635-e0cc-4f04-b616-5ec2d2677229
	I0916 11:11:20.047932 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.047941 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.047944 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.047947 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.048573 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"792","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6693 chars]
	I0916 11:11:20.049146 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:11:20.049157 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.049167 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.049172 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.051655 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:20.051673 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.051681 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.051686 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.051689 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.051693 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.051695 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.051698 2188520 round_trippers.go:580]     Audit-Id: 8c7e7747-40df-4227-b5a6-524a4a236f58
	I0916 11:11:20.052404 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:11:20.052858 2188520 pod_ready.go:93] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:20.052889 2188520 pod_ready.go:82] duration metric: took 10.804274ms for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:20.052912 2188520 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:20.053006 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-890146
	I0916 11:11:20.053012 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.053019 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.053024 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.055255 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:20.055273 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.055281 2188520 round_trippers.go:580]     Audit-Id: ac5af74a-2277-4cd3-b491-7c950f69d1f5
	I0916 11:11:20.055286 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.055289 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.055292 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.055295 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.055299 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.055967 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-890146","namespace":"kube-system","uid":"59c960ab-f6dd-4ed3-856c-ba2a02295b12","resourceVersion":"781","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.58.2:2379","kubernetes.io/config.hash":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.mirror":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.seen":"2024-09-16T11:07:32.783608135Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-cl
ient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6653 chars]
	I0916 11:11:20.056577 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:11:20.056589 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.056598 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.056602 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.058544 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:20.058567 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.058575 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.058579 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.058583 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.058592 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.058596 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.058600 2188520 round_trippers.go:580]     Audit-Id: a3bae8ad-391c-4e11-8b84-5d72aa798883
	I0916 11:11:20.059366 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:11:20.059880 2188520 pod_ready.go:93] pod "etcd-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:20.059903 2188520 pod_ready.go:82] duration metric: took 6.978588ms for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:20.059970 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:20.060079 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-890146
	I0916 11:11:20.060087 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.060095 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.060100 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.062285 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:20.062301 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.062309 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.062315 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.062360 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.062366 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.062369 2188520 round_trippers.go:580]     Audit-Id: aa5983b6-1a75-4a32-8973-70e74f6e82a8
	I0916 11:11:20.062372 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.062589 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-890146","namespace":"kube-system","uid":"9846229d-7227-453f-8c30-697aaba61648","resourceVersion":"771","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.58.2:8443","kubernetes.io/config.hash":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.mirror":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.seen":"2024-09-16T11:07:32.783610957Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kube
rnetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 8731 chars]
	I0916 11:11:20.063333 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:11:20.063348 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.063356 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.063397 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.065547 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:20.065570 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.065579 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.065609 2188520 round_trippers.go:580]     Audit-Id: 0f96989f-bee9-48a9-9420-ed6a2ab6bde0
	I0916 11:11:20.065619 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.065624 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.065634 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.065638 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.065876 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:11:20.066288 2188520 pod_ready.go:93] pod "kube-apiserver-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:20.066309 2188520 pod_ready.go:82] duration metric: took 6.331338ms for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:20.066332 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:20.066417 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-890146
	I0916 11:11:20.066431 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.066439 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.066455 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.068599 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:20.068622 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.068630 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.068636 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.068656 2188520 round_trippers.go:580]     Audit-Id: 0972ffbd-2e9f-4781-90e3-93446be708ac
	I0916 11:11:20.068664 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.068667 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.068670 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.068924 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-890146","namespace":"kube-system","uid":"421b15a5-a8e2-4631-bf18-1592c8747b09","resourceVersion":"773","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.mirror":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.seen":"2024-09-16T11:07:32.783594137Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8306 chars]
	I0916 11:11:20.069512 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:11:20.069533 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.069542 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.069546 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.071544 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:20.071567 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.071574 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.071579 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.071599 2188520 round_trippers.go:580]     Audit-Id: ec44ba50-722f-449c-9aab-599db5762be1
	I0916 11:11:20.071609 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.071613 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.071615 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.071879 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:11:20.072285 2188520 pod_ready.go:93] pod "kube-controller-manager-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:20.072303 2188520 pod_ready.go:82] duration metric: took 5.957892ms for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:20.072334 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:20.216569 2188520 request.go:632] Waited for 144.163708ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:11:20.216708 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:11:20.216718 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.216727 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.216733 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.219110 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:20.219134 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.219143 2188520 round_trippers.go:580]     Audit-Id: b0fb5b47-df95-4884-a0ef-e47cc77bb385
	I0916 11:11:20.219147 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.219152 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.219155 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.219159 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.219161 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.219589 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"885","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6396 chars]
	I0916 11:11:20.416362 2188520 request.go:632] Waited for 196.248529ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:11:20.416511 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:11:20.416524 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.416534 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.416541 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.419026 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:20.419048 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.419057 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.419067 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.419098 2188520 round_trippers.go:580]     Audit-Id: fc1a8e5f-a173-47f2-bb1f-1de4533de638
	I0916 11:11:20.419102 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.419111 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.419115 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.419552 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:11:20.419955 2188520 pod_ready.go:93] pod "kube-proxy-59f9h" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:20.419967 2188520 pod_ready.go:82] duration metric: took 347.622972ms for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:20.419978 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:20.616966 2188520 request.go:632] Waited for 196.917949ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:11:20.617030 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:11:20.617036 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.617045 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.617050 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.619537 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:20.619576 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.619592 2188520 round_trippers.go:580]     Audit-Id: 26c38eef-313b-4efb-acd7-2d865a7a4273
	I0916 11:11:20.619599 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.619603 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.619618 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.619621 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.619624 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.620012 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-fm5qr","generateName":"kube-proxy-","namespace":"kube-system","uid":"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73","resourceVersion":"759","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6388 chars]
	I0916 11:11:20.816941 2188520 request.go:632] Waited for 196.371089ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:11:20.817008 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:11:20.817014 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:20.817022 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:20.817031 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:20.819404 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:20.819427 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:20.819435 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:20 GMT
	I0916 11:11:20.819439 2188520 round_trippers.go:580]     Audit-Id: 71b07a6f-77a1-4195-9e29-e46f87ef9a93
	I0916 11:11:20.819442 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:20.819445 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:20.819448 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:20.819451 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:20.819591 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:11:20.819989 2188520 pod_ready.go:93] pod "kube-proxy-fm5qr" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:20.820008 2188520 pod_ready.go:82] duration metric: took 400.023163ms for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:20.820018 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-vl27g" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:21.017000 2188520 request.go:632] Waited for 196.891989ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:21.017074 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:21.017082 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:21.017093 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:21.017102 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:21.019890 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:21.019994 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:21.020021 2188520 round_trippers.go:580]     Audit-Id: d50b2d4f-1a40-4414-ab7b-c7c408f2e263
	I0916 11:11:21.020078 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:21.020111 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:21.020137 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:21.020167 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:21.020192 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:21 GMT
	I0916 11:11:21.020359 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:21.217294 2188520 request.go:632] Waited for 196.349903ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:21.217356 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:21.217362 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:21.217371 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:21.217380 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:21.219653 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:21.219722 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:21.219743 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:21 GMT
	I0916 11:11:21.219762 2188520 round_trippers.go:580]     Audit-Id: ceb7253c-3106-4ae3-ae4c-7999fdee563a
	I0916 11:11:21.219826 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:21.219844 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:21.219853 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:21.219858 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:21.219979 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"904","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5252 chars]
	I0916 11:11:21.416597 2188520 request.go:632] Waited for 96.275778ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:21.416714 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:21.416754 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:21.416770 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:21.416775 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:21.419258 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:21.419327 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:21.419351 2188520 round_trippers.go:580]     Audit-Id: 47675a2b-49b6-45a6-94b7-49f264df4eaf
	I0916 11:11:21.419365 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:21.419370 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:21.419374 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:21.419378 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:21.419380 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:21 GMT
	I0916 11:11:21.419823 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:21.616746 2188520 request.go:632] Waited for 196.350223ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:21.616826 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:21.616839 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:21.616848 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:21.616854 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:21.619223 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:21.619269 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:21.619279 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:21.619283 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:21.619287 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:21.619290 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:21.619294 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:21 GMT
	I0916 11:11:21.619297 2188520 round_trippers.go:580]     Audit-Id: 1d5788ae-517d-4c42-823a-cd14152ab8bf
	I0916 11:11:21.619622 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"904","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5252 chars]
	I0916 11:11:21.820397 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:21.820424 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:21.820434 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:21.820445 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:21.823076 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:21.823141 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:21.823174 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:21.823191 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:21.823215 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:21.823234 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:21.823244 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:21 GMT
	I0916 11:11:21.823247 2188520 round_trippers.go:580]     Audit-Id: 1138a2cc-9da8-4de8-97f9-0c00937a384b
	I0916 11:11:21.823379 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:22.017277 2188520 request.go:632] Waited for 193.362948ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:22.017341 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:22.017348 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:22.017364 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:22.017372 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:22.019989 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:22.020072 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:22.020098 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:22.020120 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:22.020142 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:22.020149 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:22.020152 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:22 GMT
	I0916 11:11:22.020169 2188520 round_trippers.go:580]     Audit-Id: b88f675c-83ba-47e5-a3d3-b29194f2aa9e
	I0916 11:11:22.020328 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"904","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5252 chars]
	I0916 11:11:22.320219 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:22.320244 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:22.320253 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:22.320259 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:22.322863 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:22.322934 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:22.322951 2188520 round_trippers.go:580]     Audit-Id: e0b0b233-b132-4ac6-ab97-cb4055a6e0f9
	I0916 11:11:22.322957 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:22.322961 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:22.322964 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:22.322967 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:22.322970 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:22 GMT
	I0916 11:11:22.323117 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:22.416929 2188520 request.go:632] Waited for 93.238478ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:22.416993 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:22.417005 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:22.417018 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:22.417027 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:22.419516 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:22.419543 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:22.419552 2188520 round_trippers.go:580]     Audit-Id: deda9b03-a29f-48a7-a257-ccf175b7380b
	I0916 11:11:22.419558 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:22.419561 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:22.419564 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:22.419567 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:22.419571 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:22 GMT
	I0916 11:11:22.419841 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"904","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5252 chars]
	I0916 11:11:22.820604 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:22.820628 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:22.820637 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:22.820642 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:22.823152 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:22.823183 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:22.823192 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:22.823196 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:22.823200 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:22 GMT
	I0916 11:11:22.823203 2188520 round_trippers.go:580]     Audit-Id: aa5944da-5d07-40f1-8665-9044ef3f7aac
	I0916 11:11:22.823206 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:22.823208 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:22.823519 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:22.824075 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:22.824092 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:22.824101 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:22.824105 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:22.826115 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:22.826135 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:22.826143 2188520 round_trippers.go:580]     Audit-Id: 1d2ad5c1-5d52-44fe-b1a7-2cdcda659e07
	I0916 11:11:22.826147 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:22.826150 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:22.826152 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:22.826155 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:22.826165 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:22 GMT
	I0916 11:11:22.826431 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"904","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5252 chars]
	I0916 11:11:22.826858 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:11:23.320487 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:23.320512 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:23.320522 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:23.320526 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:23.322871 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:23.322906 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:23.322915 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:23 GMT
	I0916 11:11:23.322921 2188520 round_trippers.go:580]     Audit-Id: 15843365-0e71-4cce-bac0-43d14a395d7f
	I0916 11:11:23.322927 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:23.322930 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:23.322933 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:23.322936 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:23.323119 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:23.323665 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:23.323685 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:23.323693 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:23.323697 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:23.325796 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:23.325862 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:23.325887 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:23.325902 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:23.325972 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:23.325988 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:23 GMT
	I0916 11:11:23.325991 2188520 round_trippers.go:580]     Audit-Id: ca732396-171d-4561-bbae-3b6df69ca277
	I0916 11:11:23.325994 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:23.326110 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"904","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5252 chars]
	I0916 11:11:23.820923 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:23.820947 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:23.820957 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:23.820962 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:23.823288 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:23.823317 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:23.823325 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:23.823329 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:23.823332 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:23.823335 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:23.823338 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:23 GMT
	I0916 11:11:23.823340 2188520 round_trippers.go:580]     Audit-Id: 8d463026-a0dc-423d-9d2c-c80a400d08e4
	I0916 11:11:23.823476 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:23.824002 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:23.824020 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:23.824029 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:23.824036 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:23.826040 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:23.826065 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:23.826074 2188520 round_trippers.go:580]     Audit-Id: 5a6640c0-db49-4cc4-8ef7-e216281cb4cc
	I0916 11:11:23.826078 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:23.826081 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:23.826103 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:23.826106 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:23.826109 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:23 GMT
	I0916 11:11:23.826498 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"904","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5252 chars]
	I0916 11:11:24.320271 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:24.320294 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:24.320303 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:24.320307 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:24.322804 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:24.322838 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:24.322846 2188520 round_trippers.go:580]     Audit-Id: 0b56dcb7-947e-43c7-b6e3-0342c84c1078
	I0916 11:11:24.322850 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:24.322854 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:24.322858 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:24.322862 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:24.322865 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:24 GMT
	I0916 11:11:24.323148 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:24.323690 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:24.323706 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:24.323716 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:24.323721 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:24.325849 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:24.325874 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:24.325883 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:24.325889 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:24.325895 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:24 GMT
	I0916 11:11:24.325898 2188520 round_trippers.go:580]     Audit-Id: e0dfbb60-d547-4774-a99d-6381f91cd032
	I0916 11:11:24.325900 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:24.325903 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:24.326184 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"907","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5130 chars]
	I0916 11:11:24.820928 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:24.820958 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:24.820968 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:24.820974 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:24.823603 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:24.823638 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:24.823648 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:24 GMT
	I0916 11:11:24.823652 2188520 round_trippers.go:580]     Audit-Id: 0afe5ea4-e3ad-4bb1-9cef-dabe863add9e
	I0916 11:11:24.823656 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:24.823676 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:24.823688 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:24.823694 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:24.823971 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:24.824501 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:24.824520 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:24.824530 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:24.824535 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:24.826836 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:24.826865 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:24.826874 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:24.826878 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:24.826882 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:24.826889 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:24.826892 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:24 GMT
	I0916 11:11:24.826897 2188520 round_trippers.go:580]     Audit-Id: c255be7c-8aef-44e2-a8da-f053514c9351
	I0916 11:11:24.827292 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"907","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5130 chars]
	I0916 11:11:24.827824 2188520 pod_ready.go:103] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"False"
	I0916 11:11:25.320555 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:25.320577 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:25.320587 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.320592 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.323621 2188520 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:11:25.323690 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:25.323713 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.323729 2188520 round_trippers.go:580]     Audit-Id: a74ca9df-feed-4df4-9d63-d184fbc1fc93
	I0916 11:11:25.323746 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.323763 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.323777 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:25.323793 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:25.323921 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:25.324493 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:25.324513 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:25.324523 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.324529 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.326538 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:25.326579 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:25.326588 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:25.326594 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.326598 2188520 round_trippers.go:580]     Audit-Id: 71385f6b-fb45-4262-a273-f599fb2a7eab
	I0916 11:11:25.326602 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.326605 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.326609 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:25.326862 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"907","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5130 chars]
	I0916 11:11:25.821001 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:25.821027 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:25.821038 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.821044 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.823630 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.823659 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:25.823669 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.823673 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.823677 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:25.823681 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:25.823684 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.823687 2188520 round_trippers.go:580]     Audit-Id: 5c57e1ec-2386-4e31-99ed-9cc1c4a4ea04
	I0916 11:11:25.823937 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:25.824486 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:25.824505 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:25.824515 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:25.824519 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:25.826846 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:25.826877 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:25.826886 2188520 round_trippers.go:580]     Audit-Id: 64c43677-e256-401a-b999-74521bb02599
	I0916 11:11:25.826890 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:25.826893 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:25.826896 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:25.826898 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:25.826901 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:25 GMT
	I0916 11:11:25.827292 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"907","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5130 chars]
	I0916 11:11:26.320251 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:26.320281 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:26.320297 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.320302 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.322634 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:26.322768 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:26.322794 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:26.322849 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:26.322869 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.322885 2188520 round_trippers.go:580]     Audit-Id: c9c0fb91-c934-4058-84cc-663ff0d868a5
	I0916 11:11:26.322900 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.322932 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.323783 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:26.324333 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:26.324353 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:26.324363 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.324367 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.326879 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:26.326948 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:26.326970 2188520 round_trippers.go:580]     Audit-Id: 4357ee8d-8736-4908-9745-b5bb751bffa5
	I0916 11:11:26.326986 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.327065 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.327080 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:26.327084 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:26.327088 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.327225 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"907","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5130 chars]
	I0916 11:11:26.820443 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:26.820471 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:26.820481 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.820490 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.822666 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:26.822708 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:26.822717 2188520 round_trippers.go:580]     Audit-Id: 38b8560e-54b3-4131-8968-b72ab37e6f5d
	I0916 11:11:26.822729 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.822733 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.822736 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:26.822739 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:26.822743 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.823324 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"667","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6587 chars]
	I0916 11:11:26.823849 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:26.823866 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:26.823875 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:26.823881 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:26.825690 2188520 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:11:26.825706 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:26.825714 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:26.825719 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:26.825725 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:26.825728 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:26.825731 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:26 GMT
	I0916 11:11:26.825734 2188520 round_trippers.go:580]     Audit-Id: 629a4e30-7543-490a-b883-f4b02cc2d9f6
	I0916 11:11:26.826346 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"907","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5130 chars]
	I0916 11:11:27.320415 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:11:27.320442 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:27.320451 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:27.320458 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:27.324660 2188520 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:11:27.324686 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:27.324694 2188520 round_trippers.go:580]     Audit-Id: de0562e5-3f58-446c-ab1d-b9f57742df6a
	I0916 11:11:27.324699 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:27.324702 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:27.324705 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:27.324708 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:27.324711 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:27 GMT
	I0916 11:11:27.325023 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"945","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6396 chars]
	I0916 11:11:27.325539 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:11:27.325549 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:27.325557 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:27.325560 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:27.328024 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:27.328042 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:27.328050 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:27.328054 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:27 GMT
	I0916 11:11:27.328057 2188520 round_trippers.go:580]     Audit-Id: 209ac46c-156c-4d2e-b1b3-0b65b4008542
	I0916 11:11:27.328059 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:27.328062 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:27.328064 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:27.328164 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m03","uid":"f416f77f-576a-4784-a525-265385b39179","resourceVersion":"907","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m03","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_46_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5130 chars]
	I0916 11:11:27.328523 2188520 pod_ready.go:93] pod "kube-proxy-vl27g" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:27.328534 2188520 pod_ready.go:82] duration metric: took 6.508508729s for pod "kube-proxy-vl27g" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:27.328545 2188520 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:27.328607 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:11:27.328611 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:27.328619 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:27.328623 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:27.330914 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:27.330931 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:27.330938 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:27.330941 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:27.330943 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:27.330946 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:27.330949 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:27 GMT
	I0916 11:11:27.330952 2188520 round_trippers.go:580]     Audit-Id: 64bc8544-5414-4e2e-935e-5a5948498b4a
	I0916 11:11:27.331030 2188520 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"779","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5188 chars]
	I0916 11:11:27.331464 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:11:27.331472 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:27.331480 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:27.331485 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:27.333653 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:27.333674 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:27.333682 2188520 round_trippers.go:580]     Audit-Id: d87167de-e4c7-4bca-bb9d-0ef2597b38a4
	I0916 11:11:27.333686 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:27.333689 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:27.333692 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:27.333695 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:27.333698 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:27 GMT
	I0916 11:11:27.333825 2188520 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:11:27.334190 2188520 pod_ready.go:93] pod "kube-scheduler-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:11:27.334209 2188520 pod_ready.go:82] duration metric: took 5.656798ms for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:11:27.334233 2188520 pod_ready.go:39] duration metric: took 7.310291967s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:11:27.334250 2188520 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:11:27.334304 2188520 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:11:27.346216 2188520 system_svc.go:56] duration metric: took 11.957859ms WaitForService to wait for kubelet
	I0916 11:11:27.346246 2188520 kubeadm.go:582] duration metric: took 14.92978717s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:11:27.346265 2188520 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:11:27.346396 2188520 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes
	I0916 11:11:27.346409 2188520 round_trippers.go:469] Request Headers:
	I0916 11:11:27.346419 2188520 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:11:27.346436 2188520 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:11:27.349272 2188520 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:11:27.349302 2188520 round_trippers.go:577] Response Headers:
	I0916 11:11:27.349311 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:11:27.349315 2188520 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:11:27 GMT
	I0916 11:11:27.349319 2188520 round_trippers.go:580]     Audit-Id: aec43455-ea7a-400a-b63e-0d755b1606ac
	I0916 11:11:27.349322 2188520 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:11:27.349325 2188520 round_trippers.go:580]     Content-Type: application/json
	I0916 11:11:27.349328 2188520 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:11:27.349583 2188520 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"948"},"items":[{"metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"manag
edFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1", [truncated 17796 chars]
	I0916 11:11:27.350614 2188520 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:11:27.350635 2188520 node_conditions.go:123] node cpu capacity is 2
	I0916 11:11:27.350644 2188520 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:11:27.350649 2188520 node_conditions.go:123] node cpu capacity is 2
	I0916 11:11:27.350653 2188520 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:11:27.350656 2188520 node_conditions.go:123] node cpu capacity is 2
	I0916 11:11:27.350661 2188520 node_conditions.go:105] duration metric: took 4.358942ms to run NodePressure ...
	I0916 11:11:27.350694 2188520 start.go:241] waiting for startup goroutines ...
	I0916 11:11:27.350715 2188520 start.go:255] writing updated cluster config ...
	I0916 11:11:27.351158 2188520 ssh_runner.go:195] Run: rm -f paused
	I0916 11:11:27.359753 2188520 out.go:177] * Done! kubectl is now configured to use "multinode-890146" cluster and "default" namespace by default
	E0916 11:11:27.362045 2188520 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	07054c18240bd       ba04bb24b9575       52 seconds ago       Running             storage-provisioner       2                   958cadb9890b2       storage-provisioner
	d3fb5dfba8901       89a35e2ebb6b9       About a minute ago   Running             busybox                   1                   f22f2b114c085       busybox-7dff88458-hf6zl
	0d3abd904fe54       6a23fa8fd2b78       About a minute ago   Running             kindnet-cni               1                   eb8cce93a0817       kindnet-dbrhk
	92299bff0d256       2f6c962e7b831       About a minute ago   Running             coredns                   1                   912884a0aab78       coredns-7c65d6cfc9-vp22b
	c7fed01434f6a       ba04bb24b9575       About a minute ago   Exited              storage-provisioner       1                   958cadb9890b2       storage-provisioner
	654ecbfed03d8       24a140c548c07       About a minute ago   Running             kube-proxy                1                   54bba39675f18       kube-proxy-fm5qr
	07455bc60716a       279f381cb3736       About a minute ago   Running             kube-controller-manager   1                   f5d20f97662ab       kube-controller-manager-multinode-890146
	2cc43e414446d       7f8aa378bb47d       About a minute ago   Running             kube-scheduler            1                   66f010f223721       kube-scheduler-multinode-890146
	5973d4702c823       d3f53a98c0a9d       About a minute ago   Running             kube-apiserver            1                   2303b4bd47452       kube-apiserver-multinode-890146
	9a6e3be38656a       27e3830e14027       About a minute ago   Running             etcd                      1                   87f6e771811d3       etcd-multinode-890146
	24b8d7a28fb62       89a35e2ebb6b9       3 minutes ago        Exited              busybox                   0                   b4e431597f321       busybox-7dff88458-hf6zl
	e8a9035126acc       2f6c962e7b831       3 minutes ago        Exited              coredns                   0                   4d75eb3d0406a       coredns-7c65d6cfc9-vp22b
	eccab3e428039       6a23fa8fd2b78       3 minutes ago        Exited              kindnet-cni               0                   c60887e75f823       kindnet-dbrhk
	88800ca3adcda       24a140c548c07       3 minutes ago        Exited              kube-proxy                0                   a71ab4f91b123       kube-proxy-fm5qr
	e8e11b0a6506f       27e3830e14027       4 minutes ago        Exited              etcd                      0                   d29e1a2d28295       etcd-multinode-890146
	305b8895a3440       d3f53a98c0a9d       4 minutes ago        Exited              kube-apiserver            0                   2812a818d9d32       kube-apiserver-multinode-890146
	424e6c1030bdc       7f8aa378bb47d       4 minutes ago        Exited              kube-scheduler            0                   8856cda765ae4       kube-scheduler-multinode-890146
	9d6ccf43cf5a5       279f381cb3736       4 minutes ago        Exited              kube-controller-manager   0                   8ad0b604e598f       kube-controller-manager-multinode-890146
	
	
	==> containerd <==
	Sep 16 11:09:56 multinode-890146 containerd[578]: time="2024-09-16T11:09:56.816263612Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:busybox-7dff88458-hf6zl,Uid:8e7abaaa-be47-456f-9980-53cbfcd75f48,Namespace:default,Attempt:1,} returns sandbox id \"f22f2b114c0852446f2cd61d5c888a03ac1cb22d8cecae0783f124e79c3d3090\""
	Sep 16 11:09:56 multinode-890146 containerd[578]: time="2024-09-16T11:09:56.834818369Z" level=info msg="CreateContainer within sandbox \"f22f2b114c0852446f2cd61d5c888a03ac1cb22d8cecae0783f124e79c3d3090\" for container &ContainerMetadata{Name:busybox,Attempt:1,}"
	Sep 16 11:09:56 multinode-890146 containerd[578]: time="2024-09-16T11:09:56.864325321Z" level=info msg="StartContainer for \"0d3abd904fe54a1978d08d9978009d74bddb24839e5a5ca370830593e207f392\" returns successfully"
	Sep 16 11:09:56 multinode-890146 containerd[578]: time="2024-09-16T11:09:56.898210129Z" level=info msg="StartContainer for \"c7fed01434f6a417aa19b346e2f46371fd7d5ccdf5fdd7ab5ed366a9dfb17507\" returns successfully"
	Sep 16 11:09:56 multinode-890146 containerd[578]: time="2024-09-16T11:09:56.902643156Z" level=info msg="CreateContainer within sandbox \"f22f2b114c0852446f2cd61d5c888a03ac1cb22d8cecae0783f124e79c3d3090\" for &ContainerMetadata{Name:busybox,Attempt:1,} returns container id \"d3fb5dfba89012a419ff5873a16304e9eab7d0b9dba679f84f08d4653fce0c9d\""
	Sep 16 11:09:56 multinode-890146 containerd[578]: time="2024-09-16T11:09:56.904398068Z" level=info msg="StartContainer for \"d3fb5dfba89012a419ff5873a16304e9eab7d0b9dba679f84f08d4653fce0c9d\""
	Sep 16 11:09:56 multinode-890146 containerd[578]: time="2024-09-16T11:09:56.945275134Z" level=info msg="StartContainer for \"92299bff0d2567667e4546c0c60418026422290d336e9613b5aed5df8af84cc5\" returns successfully"
	Sep 16 11:09:57 multinode-890146 containerd[578]: time="2024-09-16T11:09:57.116303852Z" level=info msg="StartContainer for \"d3fb5dfba89012a419ff5873a16304e9eab7d0b9dba679f84f08d4653fce0c9d\" returns successfully"
	Sep 16 11:10:26 multinode-890146 containerd[578]: time="2024-09-16T11:10:26.936919996Z" level=info msg="shim disconnected" id=c7fed01434f6a417aa19b346e2f46371fd7d5ccdf5fdd7ab5ed366a9dfb17507 namespace=k8s.io
	Sep 16 11:10:26 multinode-890146 containerd[578]: time="2024-09-16T11:10:26.936999151Z" level=warning msg="cleaning up after shim disconnected" id=c7fed01434f6a417aa19b346e2f46371fd7d5ccdf5fdd7ab5ed366a9dfb17507 namespace=k8s.io
	Sep 16 11:10:26 multinode-890146 containerd[578]: time="2024-09-16T11:10:26.937011647Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 11:10:27 multinode-890146 containerd[578]: time="2024-09-16T11:10:27.137634933Z" level=info msg="RemoveContainer for \"0ca1a17f4990929d27725b61f7cdbaae3f44772041814b26daa29ba43c68ec37\""
	Sep 16 11:10:27 multinode-890146 containerd[578]: time="2024-09-16T11:10:27.148584995Z" level=info msg="RemoveContainer for \"0ca1a17f4990929d27725b61f7cdbaae3f44772041814b26daa29ba43c68ec37\" returns successfully"
	Sep 16 11:10:41 multinode-890146 containerd[578]: time="2024-09-16T11:10:41.889822492Z" level=info msg="CreateContainer within sandbox \"958cadb9890b220d4d13c484cc8fb1455e617ead72d7c008b14b1b7011faa82b\" for container &ContainerMetadata{Name:storage-provisioner,Attempt:2,}"
	Sep 16 11:10:41 multinode-890146 containerd[578]: time="2024-09-16T11:10:41.912398305Z" level=info msg="CreateContainer within sandbox \"958cadb9890b220d4d13c484cc8fb1455e617ead72d7c008b14b1b7011faa82b\" for &ContainerMetadata{Name:storage-provisioner,Attempt:2,} returns container id \"07054c18240bddcff9366f96beae270b49083c8c0a3d760859239bbf844ea0cd\""
	Sep 16 11:10:41 multinode-890146 containerd[578]: time="2024-09-16T11:10:41.913023024Z" level=info msg="StartContainer for \"07054c18240bddcff9366f96beae270b49083c8c0a3d760859239bbf844ea0cd\""
	Sep 16 11:10:41 multinode-890146 containerd[578]: time="2024-09-16T11:10:41.984976163Z" level=info msg="StartContainer for \"07054c18240bddcff9366f96beae270b49083c8c0a3d760859239bbf844ea0cd\" returns successfully"
	Sep 16 11:10:50 multinode-890146 containerd[578]: time="2024-09-16T11:10:50.864645512Z" level=info msg="StopPodSandbox for \"6e9271efa56914a06d467aaafc55e0a1f754c6b31df8985dd42f2bfdb5ea8dc3\""
	Sep 16 11:10:50 multinode-890146 containerd[578]: time="2024-09-16T11:10:50.864756748Z" level=info msg="TearDown network for sandbox \"6e9271efa56914a06d467aaafc55e0a1f754c6b31df8985dd42f2bfdb5ea8dc3\" successfully"
	Sep 16 11:10:50 multinode-890146 containerd[578]: time="2024-09-16T11:10:50.864770500Z" level=info msg="StopPodSandbox for \"6e9271efa56914a06d467aaafc55e0a1f754c6b31df8985dd42f2bfdb5ea8dc3\" returns successfully"
	Sep 16 11:10:50 multinode-890146 containerd[578]: time="2024-09-16T11:10:50.866077028Z" level=info msg="RemovePodSandbox for \"6e9271efa56914a06d467aaafc55e0a1f754c6b31df8985dd42f2bfdb5ea8dc3\""
	Sep 16 11:10:50 multinode-890146 containerd[578]: time="2024-09-16T11:10:50.866113984Z" level=info msg="Forcibly stopping sandbox \"6e9271efa56914a06d467aaafc55e0a1f754c6b31df8985dd42f2bfdb5ea8dc3\""
	Sep 16 11:10:50 multinode-890146 containerd[578]: time="2024-09-16T11:10:50.866180946Z" level=info msg="TearDown network for sandbox \"6e9271efa56914a06d467aaafc55e0a1f754c6b31df8985dd42f2bfdb5ea8dc3\" successfully"
	Sep 16 11:10:50 multinode-890146 containerd[578]: time="2024-09-16T11:10:50.873180769Z" level=warning msg="Failed to get podSandbox status for container event for sandboxID \"6e9271efa56914a06d467aaafc55e0a1f754c6b31df8985dd42f2bfdb5ea8dc3\": an error occurred when try to find sandbox: not found. Sending the event with nil podSandboxStatus."
	Sep 16 11:10:50 multinode-890146 containerd[578]: time="2024-09-16T11:10:50.873415368Z" level=info msg="RemovePodSandbox \"6e9271efa56914a06d467aaafc55e0a1f754c6b31df8985dd42f2bfdb5ea8dc3\" returns successfully"
	
	
	==> coredns [92299bff0d2567667e4546c0c60418026422290d336e9613b5aed5df8af84cc5] <==
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 75e5db48a73272e2c90919c8256e5cca0293ae0ed689e2ed44f1254a9589c3d004cb3e693d059116718c47e9305987b828b11b2735a1cefa59e4a9489dda5cee
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:50907 - 6321 "HINFO IN 4361026237914167834.6224583088720257608. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.0149415s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[950690769]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:09:56.963) (total time: 30001ms):
	Trace[950690769]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (11:10:26.964)
	Trace[950690769]: [30.001619643s] [30.001619643s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1310660323]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:09:56.963) (total time: 30001ms):
	Trace[1310660323]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (11:10:26.965)
	Trace[1310660323]: [30.001340104s] [30.001340104s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[2008288180]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:09:56.964) (total time: 30002ms):
	Trace[2008288180]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30002ms (11:10:26.966)
	Trace[2008288180]: [30.002837778s] [30.002837778s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	
	
	==> coredns [e8a9035126acce637354968a7c4092e855f0da6d2a3f39d1f94b5795e1d5079b] <==
	[INFO] 10.244.0.3:52581 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.00009111s
	[INFO] 10.244.1.2:42040 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000125882s
	[INFO] 10.244.1.2:35392 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.002402252s
	[INFO] 10.244.1.2:42007 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000093398s
	[INFO] 10.244.1.2:58148 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000099052s
	[INFO] 10.244.1.2:60433 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001076606s
	[INFO] 10.244.1.2:45965 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000079565s
	[INFO] 10.244.1.2:44644 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000090806s
	[INFO] 10.244.1.2:36880 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000072689s
	[INFO] 10.244.0.3:35082 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000098806s
	[INFO] 10.244.0.3:38242 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000109899s
	[INFO] 10.244.0.3:60732 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.00008109s
	[INFO] 10.244.0.3:44313 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000072985s
	[INFO] 10.244.1.2:60136 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000150612s
	[INFO] 10.244.1.2:59383 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000110605s
	[INFO] 10.244.1.2:38945 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000092808s
	[INFO] 10.244.1.2:35665 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000081345s
	[INFO] 10.244.0.3:36942 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000120467s
	[INFO] 10.244.0.3:55441 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000108315s
	[INFO] 10.244.0.3:38725 - 5 "PTR IN 1.58.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000129336s
	[INFO] 10.244.0.3:40340 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000172389s
	[INFO] 10.244.1.2:40345 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000111507s
	[INFO] 10.244.1.2:51062 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.0000773s
	[INFO] 10.244.1.2:40631 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000083733s
	[INFO] 10.244.1.2:39196 - 5 "PTR IN 1.58.168.192.in-addr.arpa. udp 43 false 512" NOERROR qr,aa,rd 104 0.000074067s
	
	
	==> describe nodes <==
	Name:               multinode-890146
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-890146
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-890146
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T11_07_33_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:07:30 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-890146
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:11:27 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:09:55 +0000   Mon, 16 Sep 2024 11:07:27 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:09:55 +0000   Mon, 16 Sep 2024 11:07:27 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:09:55 +0000   Mon, 16 Sep 2024 11:07:27 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:09:55 +0000   Mon, 16 Sep 2024 11:07:30 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.58.2
	  Hostname:    multinode-890146
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 350b63ed0a2d40aaa99a6f15b5f19fc1
	  System UUID:                2cb24a37-7b71-4957-b8fd-d0da5c3f8b7a
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-hf6zl                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m20s
	  kube-system                 coredns-7c65d6cfc9-vp22b                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     3m57s
	  kube-system                 etcd-multinode-890146                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         4m1s
	  kube-system                 kindnet-dbrhk                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m57s
	  kube-system                 kube-apiserver-multinode-890146             250m (12%)    0 (0%)      0 (0%)           0 (0%)         4m1s
	  kube-system                 kube-controller-manager-multinode-890146    200m (10%)    0 (0%)      0 (0%)           0 (0%)         4m1s
	  kube-system                 kube-proxy-fm5qr                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m57s
	  kube-system                 kube-scheduler-multinode-890146             100m (5%)     0 (0%)      0 (0%)           0 (0%)         4m3s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m56s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                  From             Message
	  ----     ------                   ----                 ----             -------
	  Normal   Starting                 3m55s                kube-proxy       
	  Normal   Starting                 97s                  kube-proxy       
	  Normal   Starting                 4m9s                 kubelet          Starting kubelet.
	  Warning  CgroupV1                 4m9s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  4m9s (x8 over 4m9s)  kubelet          Node multinode-890146 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    4m9s (x7 over 4m9s)  kubelet          Node multinode-890146 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     4m9s (x7 over 4m9s)  kubelet          Node multinode-890146 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  4m9s                 kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeAllocatableEnforced  4m2s                 kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 4m2s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   Starting                 4m2s                 kubelet          Starting kubelet.
	  Normal   NodeHasSufficientMemory  4m1s                 kubelet          Node multinode-890146 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    4m1s                 kubelet          Node multinode-890146 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     4m1s                 kubelet          Node multinode-890146 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           3m58s                node-controller  Node multinode-890146 event: Registered Node multinode-890146 in Controller
	  Normal   Starting                 104s                 kubelet          Starting kubelet.
	  Warning  CgroupV1                 104s                 kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  104s (x8 over 104s)  kubelet          Node multinode-890146 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    104s (x7 over 104s)  kubelet          Node multinode-890146 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     104s (x7 over 104s)  kubelet          Node multinode-890146 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  104s                 kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           96s                  node-controller  Node multinode-890146 event: Registered Node multinode-890146 in Controller
	
	
	Name:               multinode-890146-m02
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-890146-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-890146
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T11_08_09_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:08:09 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-890146-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:11:34 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:10:53 +0000   Mon, 16 Sep 2024 11:10:53 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:10:53 +0000   Mon, 16 Sep 2024 11:10:53 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:10:53 +0000   Mon, 16 Sep 2024 11:10:53 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:10:53 +0000   Mon, 16 Sep 2024 11:10:53 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.58.3
	  Hostname:    multinode-890146-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 dd0be065973342c8a0f33649596b4d80
	  System UUID:                afe70f4d-0cb5-4f79-97b8-28a81db2fa30
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                       CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                       ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-wrnfh    0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m20s
	  kube-system                 kindnet-4sjj6              100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      3m25s
	  kube-system                 kube-proxy-59f9h           0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m25s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 30s                    kube-proxy       
	  Normal   Starting                 3m22s                  kube-proxy       
	  Normal   NodeAllocatableEnforced  3m25s                  kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 3m25s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m25s (x2 over 3m25s)  kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m25s (x2 over 3m25s)  kubelet          Node multinode-890146-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m25s (x2 over 3m25s)  kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientPID
	  Normal   NodeReady                3m24s                  kubelet          Node multinode-890146-m02 status is now: NodeReady
	  Normal   RegisteredNode           3m23s                  node-controller  Node multinode-890146-m02 event: Registered Node multinode-890146-m02 in Controller
	  Normal   RegisteredNode           96s                    node-controller  Node multinode-890146-m02 event: Registered Node multinode-890146-m02 in Controller
	  Normal   NodeNotReady             56s                    node-controller  Node multinode-890146-m02 status is now: NodeNotReady
	  Normal   Starting                 54s                    kubelet          Starting kubelet.
	  Warning  CgroupV1                 54s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  54s                    kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasNoDiskPressure    47s (x7 over 54s)      kubelet          Node multinode-890146-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     47s (x7 over 54s)      kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  41s (x8 over 54s)      kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientMemory
	
	
	==> dmesg <==
	
	
	==> etcd [9a6e3be38656a34c99c98c1d83ac245ced91c2c4e06160058130d7bdf77a6cb2] <==
	{"level":"info","ts":"2024-09-16T11:09:51.773024Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"3a56e4ca95e2355c","local-member-id":"b2c6679ac05f2cf1","added-peer-id":"b2c6679ac05f2cf1","added-peer-peer-urls":["https://192.168.58.2:2380"]}
	{"level":"info","ts":"2024-09-16T11:09:51.773119Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"3a56e4ca95e2355c","local-member-id":"b2c6679ac05f2cf1","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:09:51.773157Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:09:51.779849Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:09:51.782047Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T11:09:51.782312Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"b2c6679ac05f2cf1","initial-advertise-peer-urls":["https://192.168.58.2:2380"],"listen-peer-urls":["https://192.168.58.2:2380"],"advertise-client-urls":["https://192.168.58.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.58.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T11:09:51.782345Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T11:09:51.782599Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.58.2:2380"}
	{"level":"info","ts":"2024-09-16T11:09:51.782610Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.58.2:2380"}
	{"level":"info","ts":"2024-09-16T11:09:53.150722Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 is starting a new election at term 2"}
	{"level":"info","ts":"2024-09-16T11:09:53.150971Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T11:09:53.151084Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 received MsgPreVoteResp from b2c6679ac05f2cf1 at term 2"}
	{"level":"info","ts":"2024-09-16T11:09:53.151186Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T11:09:53.151270Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 received MsgVoteResp from b2c6679ac05f2cf1 at term 3"}
	{"level":"info","ts":"2024-09-16T11:09:53.151366Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became leader at term 3"}
	{"level":"info","ts":"2024-09-16T11:09:53.151450Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: b2c6679ac05f2cf1 elected leader b2c6679ac05f2cf1 at term 3"}
	{"level":"info","ts":"2024-09-16T11:09:53.154994Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:09:53.156096Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:09:53.157207Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T11:09:53.154957Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"b2c6679ac05f2cf1","local-member-attributes":"{Name:multinode-890146 ClientURLs:[https://192.168.58.2:2379]}","request-path":"/0/members/b2c6679ac05f2cf1/attributes","cluster-id":"3a56e4ca95e2355c","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:09:53.165113Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:09:53.165419Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:09:53.165533Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T11:09:53.166421Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:09:53.167510Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.58.2:2379"}
	
	
	==> etcd [e8e11b0a6506f9d34c5800c4a5a6bcc8b9f3225a3487a3c437bc87d0b0aaf53d] <==
	{"level":"info","ts":"2024-09-16T11:07:26.598384Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T11:07:26.598542Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.58.2:2380"}
	{"level":"info","ts":"2024-09-16T11:07:26.598739Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.58.2:2380"}
	{"level":"info","ts":"2024-09-16T11:07:26.599020Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"b2c6679ac05f2cf1","initial-advertise-peer-urls":["https://192.168.58.2:2380"],"listen-peer-urls":["https://192.168.58.2:2380"],"advertise-client-urls":["https://192.168.58.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.58.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T11:07:26.599063Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T11:07:26.816839Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T11:07:26.816896Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T11:07:26.816925Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 received MsgPreVoteResp from b2c6679ac05f2cf1 at term 1"}
	{"level":"info","ts":"2024-09-16T11:07:26.816938Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T11:07:26.816946Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 received MsgVoteResp from b2c6679ac05f2cf1 at term 2"}
	{"level":"info","ts":"2024-09-16T11:07:26.816957Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became leader at term 2"}
	{"level":"info","ts":"2024-09-16T11:07:26.816966Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: b2c6679ac05f2cf1 elected leader b2c6679ac05f2cf1 at term 2"}
	{"level":"info","ts":"2024-09-16T11:07:26.818827Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:07:26.822083Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"b2c6679ac05f2cf1","local-member-attributes":"{Name:multinode-890146 ClientURLs:[https://192.168.58.2:2379]}","request-path":"/0/members/b2c6679ac05f2cf1/attributes","cluster-id":"3a56e4ca95e2355c","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:07:26.822228Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:07:26.823261Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:07:26.824339Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T11:07:26.830909Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:07:26.831002Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"3a56e4ca95e2355c","local-member-id":"b2c6679ac05f2cf1","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:07:26.831112Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:07:26.831144Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:07:26.831564Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:07:26.831585Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T11:07:26.840842Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:07:26.842343Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.58.2:2379"}
	
	
	==> kernel <==
	 11:11:34 up 1 day, 14:53,  0 users,  load average: 1.98, 2.00, 2.08
	Linux multinode-890146 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [0d3abd904fe54a1978d08d9978009d74bddb24839e5a5ca370830593e207f392] <==
	I0916 11:10:47.320633       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	I0916 11:10:57.320550       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:10:57.320587       1 main.go:299] handling current node
	I0916 11:10:57.320603       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:10:57.320609       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:10:57.320874       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:10:57.320894       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	I0916 11:11:07.319656       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:11:07.319711       1 main.go:299] handling current node
	I0916 11:11:07.319727       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:11:07.319733       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:11:07.320085       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:11:07.320104       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	I0916 11:11:17.327031       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:11:17.327071       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	I0916 11:11:17.327272       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:11:17.327288       1 main.go:299] handling current node
	I0916 11:11:17.327307       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:11:17.327398       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:11:27.321666       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:11:27.321706       1 main.go:299] handling current node
	I0916 11:11:27.321722       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:11:27.321729       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:11:27.321829       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:11:27.321835       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	
	
	==> kindnet [eccab3e428039af99fd1b2378ae8fd52f2837469955a8b78fd8b72f906813586] <==
	I0916 11:08:49.341182       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.2.0/24 Src: <nil> Gw: 192.168.58.4 Flags: [] Table: 0} 
	I0916 11:08:59.342099       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:08:59.342136       1 main.go:299] handling current node
	I0916 11:08:59.342153       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:08:59.342160       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:08:59.342285       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:08:59.342291       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	I0916 11:09:09.342281       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:09:09.342413       1 main.go:299] handling current node
	I0916 11:09:09.342453       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:09:09.342484       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:09:09.342714       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:09:09.342761       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	I0916 11:09:19.335765       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:09:19.335807       1 main.go:299] handling current node
	I0916 11:09:19.335824       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:09:19.335832       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:09:19.335959       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:09:19.335974       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	I0916 11:09:29.335775       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:09:29.335812       1 main.go:299] handling current node
	I0916 11:09:29.335828       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:09:29.335835       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:09:29.336078       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:09:29.336137       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	
	
	==> kube-apiserver [305b8895a34401a4626618470969b6ca3b591de13b0d36af0b2b2b23096ac46b] <==
	E0916 11:07:30.119633       1 controller.go:148] "Unhandled Error" err="while syncing ConfigMap \"kube-system/kube-apiserver-legacy-service-account-token-tracking\", err: namespaces \"kube-system\" not found" logger="UnhandledError"
	I0916 11:07:30.130522       1 controller.go:615] quota admission added evaluator for: namespaces
	I0916 11:07:30.311421       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:07:30.828773       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0916 11:07:30.837047       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0916 11:07:30.837071       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:07:31.543409       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 11:07:31.599890       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 11:07:31.699349       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 11:07:31.710041       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.58.2]
	I0916 11:07:31.711701       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:07:31.718520       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 11:07:31.982310       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 11:07:32.906044       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 11:07:32.918442       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 11:07:32.931656       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 11:07:36.841575       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0916 11:07:37.332525       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	E0916 11:08:30.067502       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:36938: use of closed network connection
	E0916 11:08:30.295579       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:36950: use of closed network connection
	E0916 11:08:30.506756       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:36960: use of closed network connection
	E0916 11:08:30.713694       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:36984: use of closed network connection
	E0916 11:08:31.123853       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:37034: use of closed network connection
	E0916 11:08:31.466128       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:37056: use of closed network connection
	E0916 11:08:32.100602       1 conn.go:339] Error on socket receive: read tcp 192.168.58.2:8443->192.168.58.1:37082: use of closed network connection
	
	
	==> kube-apiserver [5973d4702c82301758aca3fa2a6a770d5ce1c6ff9abd4830207a977a63162fdc] <==
	I0916 11:09:55.487857       1 establishing_controller.go:81] Starting EstablishingController
	I0916 11:09:55.487882       1 nonstructuralschema_controller.go:195] Starting NonStructuralSchemaConditionController
	I0916 11:09:55.487897       1 apiapproval_controller.go:189] Starting KubernetesAPIApprovalPolicyConformantConditionController
	I0916 11:09:55.487908       1 crd_finalizer.go:269] Starting CRDFinalizer
	I0916 11:09:55.633515       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 11:09:55.634246       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 11:09:55.634429       1 aggregator.go:171] initial CRD sync complete...
	I0916 11:09:55.634810       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 11:09:55.634969       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 11:09:55.635042       1 cache.go:39] Caches are synced for autoregister controller
	I0916 11:09:55.646727       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 11:09:55.646836       1 policy_source.go:224] refreshing policies
	I0916 11:09:55.646962       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 11:09:55.713231       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 11:09:55.716777       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 11:09:55.717073       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 11:09:55.717359       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 11:09:55.717375       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 11:09:55.718065       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 11:09:55.720363       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	E0916 11:09:55.731730       1 controller.go:97] Error removing old endpoints from kubernetes service: no API server IP addresses were listed in storage, refusing to erase all endpoints for the kubernetes Service
	I0916 11:09:55.734411       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:09:56.420819       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:09:59.090863       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:09:59.333962       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	
	
	==> kube-controller-manager [07455bc60716ac512dd7e5994733e02cc35ce9f026df34c778b61b0551008067] <==
	I0916 11:10:34.141882       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="190.399µs"
	I0916 11:10:38.866338       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:10:38.866654       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:10:38.869927       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:10:38.884252       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:10:38.893710       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:10:38.927610       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="12.231812ms"
	I0916 11:10:38.928406       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="60.808µs"
	I0916 11:10:44.011777       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:10:53.732505       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:10:53.733023       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:10:53.744789       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:10:53.931755       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:10:54.086927       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:11:03.972440       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="49.846µs"
	I0916 11:11:05.041793       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="11.011584ms"
	I0916 11:11:05.042014       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="71.409µs"
	I0916 11:11:19.990147       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:11:19.990739       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:11:20.022943       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:11:23.948848       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:11:28.044133       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:11:28.063057       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:11:28.628744       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:11:28.629038       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	
	
	==> kube-controller-manager [9d6ccf43cf5a5c28d56e616702330e693dc76d6773c7cc3e02e94f189195689b] <==
	I0916 11:08:14.387728       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="50.768606ms"
	I0916 11:08:14.452119       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="64.332523ms"
	I0916 11:08:14.452218       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="51.766µs"
	I0916 11:08:14.468869       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="65.157µs"
	I0916 11:08:16.989815       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="7.491662ms"
	I0916 11:08:16.989901       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="44.176µs"
	I0916 11:08:29.309361       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="12.824827ms"
	I0916 11:08:29.310058       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="648.874µs"
	I0916 11:08:34.122844       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146"
	I0916 11:08:39.809520       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:08:46.203029       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:08:46.203855       1 actual_state_of_world.go:540] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"multinode-890146-m03\" does not exist"
	I0916 11:08:46.218321       1 range_allocator.go:422] "Set node PodCIDR" logger="node-ipam-controller" node="multinode-890146-m03" podCIDRs=["10.244.2.0/24"]
	I0916 11:08:46.218361       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.219096       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.235698       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.302636       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.605228       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:46.989720       1 node_lifecycle_controller.go:884] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="multinode-890146-m03"
	I0916 11:08:47.080416       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:47.243737       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:08:47.243773       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:47.254901       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:08:52.008891       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:09:14.825397       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	
	
	==> kube-proxy [654ecbfed03d8e141ce6b52bf8bf1bd98bc2fcb02e1f9df6947b6c89d77bde4b] <==
	I0916 11:09:56.888587       1 server_linux.go:66] "Using iptables proxy"
	I0916 11:09:57.098848       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.58.2"]
	E0916 11:09:57.099108       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 11:09:57.134068       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 11:09:57.134295       1 server_linux.go:169] "Using iptables Proxier"
	I0916 11:09:57.138428       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 11:09:57.139452       1 server.go:483] "Version info" version="v1.31.1"
	I0916 11:09:57.139482       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:09:57.144752       1 config.go:328] "Starting node config controller"
	I0916 11:09:57.144816       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 11:09:57.145039       1 config.go:199] "Starting service config controller"
	I0916 11:09:57.145095       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 11:09:57.145196       1 config.go:105] "Starting endpoint slice config controller"
	I0916 11:09:57.145235       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 11:09:57.245716       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 11:09:57.245735       1 shared_informer.go:320] Caches are synced for service config
	I0916 11:09:57.246330       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [88800ca3adcdad421bba0ffcef548a966eeb5c210e5453a2ba8470a9e90ea01e] <==
	I0916 11:07:38.741936       1 server_linux.go:66] "Using iptables proxy"
	I0916 11:07:38.860580       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.58.2"]
	E0916 11:07:38.860641       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 11:07:38.962890       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 11:07:38.962952       1 server_linux.go:169] "Using iptables Proxier"
	I0916 11:07:38.967180       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 11:07:38.968664       1 server.go:483] "Version info" version="v1.31.1"
	I0916 11:07:38.968689       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:07:38.978327       1 config.go:199] "Starting service config controller"
	I0916 11:07:38.978605       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 11:07:38.979008       1 config.go:105] "Starting endpoint slice config controller"
	I0916 11:07:38.979892       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 11:07:38.980656       1 config.go:328] "Starting node config controller"
	I0916 11:07:38.980803       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 11:07:39.083387       1 shared_informer.go:320] Caches are synced for node config
	I0916 11:07:39.083600       1 shared_informer.go:320] Caches are synced for service config
	I0916 11:07:39.083628       1 shared_informer.go:320] Caches are synced for endpoint slice config
	
	
	==> kube-scheduler [2cc43e414446d6a831068e3af8cf0e1b1501b91167342420a33bdc74bc31c020] <==
	W0916 11:09:55.628657       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 11:09:55.628676       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.628756       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 11:09:55.628784       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.628837       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 11:09:55.628872       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.629209       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 11:09:55.629372       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.629579       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 11:09:55.629714       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.629937       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 11:09:55.637784       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.637395       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 11:09:55.637851       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.637587       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 11:09:55.637872       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.637674       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 11:09:55.637973       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.637729       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 11:09:55.638060       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.638206       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 11:09:55.638278       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.642795       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 11:09:55.642891       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	I0916 11:09:55.696400       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kube-scheduler [424e6c1030bdc58751fd76a7652c31e5bd7dff844d888049b87815ddfaecc90b] <==
	I0916 11:07:29.575629       1 serving.go:386] Generated self-signed cert in-memory
	I0916 11:07:32.225115       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 11:07:32.225654       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:07:32.234302       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 11:07:32.234494       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 11:07:32.234575       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 11:07:32.234645       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 11:07:32.244028       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 11:07:32.245351       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 11:07:32.244860       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 11:07:32.254326       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 11:07:32.335551       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 11:07:32.355020       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 11:07:32.355033       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 11:09:55 multinode-890146 kubelet[666]: I0916 11:09:55.713070     666 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Sep 16 11:09:55 multinode-890146 kubelet[666]: I0916 11:09:55.858378     666 apiserver.go:52] "Watching apiserver"
	Sep 16 11:09:55 multinode-890146 kubelet[666]: I0916 11:09:55.867158     666 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
	Sep 16 11:09:55 multinode-890146 kubelet[666]: I0916 11:09:55.905716     666 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73-xtables-lock\") pod \"kube-proxy-fm5qr\" (UID: \"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73\") " pod="kube-system/kube-proxy-fm5qr"
	Sep 16 11:09:55 multinode-890146 kubelet[666]: I0916 11:09:55.909483     666 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-cfg\" (UniqueName: \"kubernetes.io/host-path/24ef6be7-a1ab-41f7-83c8-aa5af5007281-cni-cfg\") pod \"kindnet-dbrhk\" (UID: \"24ef6be7-a1ab-41f7-83c8-aa5af5007281\") " pod="kube-system/kindnet-dbrhk"
	Sep 16 11:09:55 multinode-890146 kubelet[666]: I0916 11:09:55.910550     666 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73-lib-modules\") pod \"kube-proxy-fm5qr\" (UID: \"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73\") " pod="kube-system/kube-proxy-fm5qr"
	Sep 16 11:09:55 multinode-890146 kubelet[666]: I0916 11:09:55.910573     666 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/24ef6be7-a1ab-41f7-83c8-aa5af5007281-lib-modules\") pod \"kindnet-dbrhk\" (UID: \"24ef6be7-a1ab-41f7-83c8-aa5af5007281\") " pod="kube-system/kindnet-dbrhk"
	Sep 16 11:09:55 multinode-890146 kubelet[666]: I0916 11:09:55.910618     666 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/97795413-5c7a-480b-9cbd-18d4dea5669b-tmp\") pod \"storage-provisioner\" (UID: \"97795413-5c7a-480b-9cbd-18d4dea5669b\") " pod="kube-system/storage-provisioner"
	Sep 16 11:09:55 multinode-890146 kubelet[666]: I0916 11:09:55.910641     666 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/24ef6be7-a1ab-41f7-83c8-aa5af5007281-xtables-lock\") pod \"kindnet-dbrhk\" (UID: \"24ef6be7-a1ab-41f7-83c8-aa5af5007281\") " pod="kube-system/kindnet-dbrhk"
	Sep 16 11:09:55 multinode-890146 kubelet[666]: I0916 11:09:55.926388     666 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 11:10:00 multinode-890146 kubelet[666]: E0916 11:10:00.996097     666 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 11:10:00 multinode-890146 kubelet[666]: E0916 11:10:00.996140     666 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 11:10:04 multinode-890146 kubelet[666]: I0916 11:10:04.114412     666 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
	Sep 16 11:10:11 multinode-890146 kubelet[666]: E0916 11:10:11.022600     666 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 11:10:11 multinode-890146 kubelet[666]: E0916 11:10:11.022653     666 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 11:10:21 multinode-890146 kubelet[666]: E0916 11:10:21.047422     666 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 11:10:21 multinode-890146 kubelet[666]: E0916 11:10:21.047480     666 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 11:10:27 multinode-890146 kubelet[666]: I0916 11:10:27.129575     666 scope.go:117] "RemoveContainer" containerID="0ca1a17f4990929d27725b61f7cdbaae3f44772041814b26daa29ba43c68ec37"
	Sep 16 11:10:27 multinode-890146 kubelet[666]: I0916 11:10:27.129974     666 scope.go:117] "RemoveContainer" containerID="c7fed01434f6a417aa19b346e2f46371fd7d5ccdf5fdd7ab5ed366a9dfb17507"
	Sep 16 11:10:27 multinode-890146 kubelet[666]: E0916 11:10:27.130128     666 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"storage-provisioner\" with CrashLoopBackOff: \"back-off 10s restarting failed container=storage-provisioner pod=storage-provisioner_kube-system(97795413-5c7a-480b-9cbd-18d4dea5669b)\"" pod="kube-system/storage-provisioner" podUID="97795413-5c7a-480b-9cbd-18d4dea5669b"
	Sep 16 11:10:31 multinode-890146 kubelet[666]: E0916 11:10:31.063890     666 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 11:10:31 multinode-890146 kubelet[666]: E0916 11:10:31.063954     666 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 11:10:41 multinode-890146 kubelet[666]: E0916 11:10:41.084519     666 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 11:10:41 multinode-890146 kubelet[666]: E0916 11:10:41.085037     666 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 11:10:41 multinode-890146 kubelet[666]: I0916 11:10:41.887325     666 scope.go:117] "RemoveContainer" containerID="c7fed01434f6a417aa19b346e2f46371fd7d5ccdf5fdd7ab5ed366a9dfb17507"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p multinode-890146 -n multinode-890146
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-890146 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context multinode-890146 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (503.727µs)
helpers_test.go:263: kubectl --context multinode-890146 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiNode/serial/DeleteNode (9.28s)

                                                
                                    
x
+
TestMultiNode/serial/RestartMultiNode (51.6s)

                                                
                                                
=== RUN   TestMultiNode/serial/RestartMultiNode
multinode_test.go:376: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-890146 --wait=true -v=8 --alsologtostderr --driver=docker  --container-runtime=containerd
multinode_test.go:376: (dbg) Done: out/minikube-linux-arm64 start -p multinode-890146 --wait=true -v=8 --alsologtostderr --driver=docker  --container-runtime=containerd: (48.138785539s)
multinode_test.go:382: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 status --alsologtostderr
multinode_test.go:396: (dbg) Run:  kubectl get nodes
multinode_test.go:396: (dbg) Non-zero exit: kubectl get nodes: fork/exec /usr/local/bin/kubectl: exec format error (664.005µs)
multinode_test.go:398: failed to run kubectl get nodes. args "kubectl get nodes" : fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiNode/serial/RestartMultiNode]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect multinode-890146
helpers_test.go:235: (dbg) docker inspect multinode-890146:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb",
	        "Created": "2024-09-16T11:07:09.881207881Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2195424,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:12:01.565276442Z",
	            "FinishedAt": "2024-09-16T11:12:00.416795497Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/hostname",
	        "HostsPath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/hosts",
	        "LogPath": "/var/lib/docker/containers/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb-json.log",
	        "Name": "/multinode-890146",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "multinode-890146:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "multinode-890146",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7/merged",
	                "UpperDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7/diff",
	                "WorkDir": "/var/lib/docker/overlay2/1b4687a32093bd43e8c1e377ce13aea12cdd7819adc3c117084290277ddb29d7/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "multinode-890146",
	                "Source": "/var/lib/docker/volumes/multinode-890146/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "multinode-890146",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "multinode-890146",
	                "name.minikube.sigs.k8s.io": "multinode-890146",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "6a810e7228ca2e669a7a0bb9aa32fdd1a84ea88193dcd1e02e8ae7f47cfb0667",
	            "SandboxKey": "/var/run/docker/netns/6a810e7228ca",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40752"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40753"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40756"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40754"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40755"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "multinode-890146": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.58.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:3a:02",
	                    "DriverOpts": null,
	                    "NetworkID": "b138f637362d33b7ccebcd9c06d6cdaa35c434cdf582fc761f98e8246e8681cc",
	                    "EndpointID": "f58ec4293aa1f2234be7eaf113e4139637a2cb93d78cf8d692cbbffdae1faa18",
	                    "Gateway": "192.168.58.1",
	                    "IPAddress": "192.168.58.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "multinode-890146",
	                        "d045dde36e30"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p multinode-890146 -n multinode-890146
helpers_test.go:244: <<< TestMultiNode/serial/RestartMultiNode FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/RestartMultiNode]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p multinode-890146 logs -n 25: (1.773194225s)
helpers_test.go:252: TestMultiNode/serial/RestartMultiNode logs: 
-- stdout --
	
	==> Audit <==
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	| Command |                                          Args                                           |     Profile      |  User   | Version |     Start Time      |      End Time       |
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	| cp      | multinode-890146 cp multinode-890146-m02:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | multinode-890146:/home/docker/cp-test_multinode-890146-m02_multinode-890146.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | multinode-890146-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n multinode-890146 sudo cat                                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:08 UTC |
	|         | /home/docker/cp-test_multinode-890146-m02_multinode-890146.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m02:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:08 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03:/home/docker/cp-test_multinode-890146-m02_multinode-890146-m03.txt |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m02 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n multinode-890146-m03 sudo cat                                   | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /home/docker/cp-test_multinode-890146-m02_multinode-890146-m03.txt                      |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp testdata/cp-test.txt                                                | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03:/home/docker/cp-test.txt                                           |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m03:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /tmp/TestMultiNodeserialCopyFile3892048771/001/cp-test_multinode-890146-m03.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m03:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146:/home/docker/cp-test_multinode-890146-m03_multinode-890146.txt         |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n multinode-890146 sudo cat                                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /home/docker/cp-test_multinode-890146-m03_multinode-890146.txt                          |                  |         |         |                     |                     |
	| cp      | multinode-890146 cp multinode-890146-m03:/home/docker/cp-test.txt                       | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m02:/home/docker/cp-test_multinode-890146-m03_multinode-890146-m02.txt |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n                                                                 | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | multinode-890146-m03 sudo cat                                                           |                  |         |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                |                  |         |         |                     |                     |
	| ssh     | multinode-890146 ssh -n multinode-890146-m02 sudo cat                                   | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | /home/docker/cp-test_multinode-890146-m03_multinode-890146-m02.txt                      |                  |         |         |                     |                     |
	| node    | multinode-890146 node stop m03                                                          | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	| node    | multinode-890146 node start                                                             | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	|         | m03 -v=7 --alsologtostderr                                                              |                  |         |         |                     |                     |
	| node    | list -p multinode-890146                                                                | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC |                     |
	| stop    | -p multinode-890146                                                                     | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:09 UTC |
	| start   | -p multinode-890146                                                                     | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:09 UTC | 16 Sep 24 11:11 UTC |
	|         | --wait=true -v=8                                                                        |                  |         |         |                     |                     |
	|         | --alsologtostderr                                                                       |                  |         |         |                     |                     |
	| node    | list -p multinode-890146                                                                | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC |                     |
	| node    | multinode-890146 node delete                                                            | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:11 UTC |
	|         | m03                                                                                     |                  |         |         |                     |                     |
	| stop    | multinode-890146 stop                                                                   | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:11 UTC | 16 Sep 24 11:12 UTC |
	| start   | -p multinode-890146                                                                     | multinode-890146 | jenkins | v1.34.0 | 16 Sep 24 11:12 UTC | 16 Sep 24 11:12 UTC |
	|         | --wait=true -v=8                                                                        |                  |         |         |                     |                     |
	|         | --alsologtostderr                                                                       |                  |         |         |                     |                     |
	|         | --driver=docker                                                                         |                  |         |         |                     |                     |
	|         | --container-runtime=containerd                                                          |                  |         |         |                     |                     |
	|---------|-----------------------------------------------------------------------------------------|------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 11:12:00
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 11:12:00.990177 2195229 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:12:00.990390 2195229 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:12:00.990417 2195229 out.go:358] Setting ErrFile to fd 2...
	I0916 11:12:00.990438 2195229 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:12:00.990783 2195229 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 11:12:00.991217 2195229 out.go:352] Setting JSON to false
	I0916 11:12:00.992228 2195229 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":140063,"bootTime":1726345058,"procs":189,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 11:12:00.992328 2195229 start.go:139] virtualization:  
	I0916 11:12:00.994889 2195229 out.go:177] * [multinode-890146] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:12:00.996767 2195229 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:12:00.996934 2195229 notify.go:220] Checking for updates...
	I0916 11:12:01.000881 2195229 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:12:01.004865 2195229 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:12:01.007458 2195229 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 11:12:01.009473 2195229 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:12:01.011298 2195229 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 11:12:01.013615 2195229 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:12:01.014203 2195229 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:12:01.046865 2195229 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:12:01.047009 2195229 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:12:01.114255 2195229 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:4 ContainersRunning:2 ContainersPaused:0 ContainersStopped:2 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:54 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 11:12:01.104559921 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:12:01.114365 2195229 docker.go:318] overlay module found
	I0916 11:12:01.116403 2195229 out.go:177] * Using the docker driver based on existing profile
	I0916 11:12:01.118263 2195229 start.go:297] selected driver: docker
	I0916 11:12:01.118278 2195229 start.go:901] validating driver "docker" against &{Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName
:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false
nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:12:01.118433 2195229 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:12:01.118543 2195229 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:12:01.184298 2195229 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:4 ContainersRunning:2 ContainersPaused:0 ContainersStopped:2 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:54 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 11:12:01.174243644 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:12:01.184796 2195229 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:12:01.184832 2195229 cni.go:84] Creating CNI manager for ""
	I0916 11:12:01.184877 2195229 cni.go:136] multinode detected (2 nodes found), recommending kindnet
	I0916 11:12:01.184926 2195229 start.go:340] cluster config:
	{Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-secur
ity-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:12:01.187290 2195229 out.go:177] * Starting "multinode-890146" primary control-plane node in "multinode-890146" cluster
	I0916 11:12:01.189705 2195229 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 11:12:01.191838 2195229 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:12:01.193942 2195229 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:12:01.194005 2195229 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 11:12:01.194018 2195229 cache.go:56] Caching tarball of preloaded images
	I0916 11:12:01.194029 2195229 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:12:01.194109 2195229 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 11:12:01.194120 2195229 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 11:12:01.194264 2195229 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	W0916 11:12:01.215038 2195229 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:12:01.215061 2195229 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:12:01.215150 2195229 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:12:01.215173 2195229 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:12:01.215179 2195229 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:12:01.215187 2195229 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:12:01.215192 2195229 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:12:01.216556 2195229 image.go:273] response: 
	I0916 11:12:01.417508 2195229 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:12:01.417550 2195229 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:12:01.417598 2195229 start.go:360] acquireMachinesLock for multinode-890146: {Name:mk50282545d8a591b3d758c5d48e2059a356819d Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:12:01.417680 2195229 start.go:364] duration metric: took 53.309µs to acquireMachinesLock for "multinode-890146"
	I0916 11:12:01.417704 2195229 start.go:96] Skipping create...Using existing machine configuration
	I0916 11:12:01.417714 2195229 fix.go:54] fixHost starting: 
	I0916 11:12:01.417996 2195229 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:12:01.437600 2195229 fix.go:112] recreateIfNeeded on multinode-890146: state=Stopped err=<nil>
	W0916 11:12:01.437633 2195229 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 11:12:01.440947 2195229 out.go:177] * Restarting existing docker container for "multinode-890146" ...
	I0916 11:12:01.442639 2195229 cli_runner.go:164] Run: docker start multinode-890146
	I0916 11:12:01.771865 2195229 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:12:01.793020 2195229 kic.go:430] container "multinode-890146" state is running.
	I0916 11:12:01.793431 2195229 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146
	I0916 11:12:01.816647 2195229 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:12:01.816888 2195229 machine.go:93] provisionDockerMachine start ...
	I0916 11:12:01.816953 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:12:01.838877 2195229 main.go:141] libmachine: Using SSH client type: native
	I0916 11:12:01.839140 2195229 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40752 <nil> <nil>}
	I0916 11:12:01.839155 2195229 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:12:01.839982 2195229 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 11:12:04.978436 2195229 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146
	
	I0916 11:12:04.978470 2195229 ubuntu.go:169] provisioning hostname "multinode-890146"
	I0916 11:12:04.978583 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:12:04.995840 2195229 main.go:141] libmachine: Using SSH client type: native
	I0916 11:12:04.996106 2195229 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40752 <nil> <nil>}
	I0916 11:12:04.996126 2195229 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-890146 && echo "multinode-890146" | sudo tee /etc/hostname
	I0916 11:12:05.151330 2195229 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146
	
	I0916 11:12:05.151427 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:12:05.169367 2195229 main.go:141] libmachine: Using SSH client type: native
	I0916 11:12:05.169656 2195229 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40752 <nil> <nil>}
	I0916 11:12:05.169685 2195229 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-890146' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-890146/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-890146' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:12:05.306655 2195229 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:12:05.306696 2195229 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 11:12:05.306727 2195229 ubuntu.go:177] setting up certificates
	I0916 11:12:05.306737 2195229 provision.go:84] configureAuth start
	I0916 11:12:05.306797 2195229 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146
	I0916 11:12:05.325692 2195229 provision.go:143] copyHostCerts
	I0916 11:12:05.325747 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:12:05.325786 2195229 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 11:12:05.325797 2195229 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:12:05.325872 2195229 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 11:12:05.325968 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:12:05.325989 2195229 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 11:12:05.326001 2195229 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:12:05.326030 2195229 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 11:12:05.326083 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:12:05.326104 2195229 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 11:12:05.326111 2195229 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:12:05.326139 2195229 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 11:12:05.326199 2195229 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.multinode-890146 san=[127.0.0.1 192.168.58.2 localhost minikube multinode-890146]
	I0916 11:12:05.888577 2195229 provision.go:177] copyRemoteCerts
	I0916 11:12:05.888653 2195229 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:12:05.888697 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:12:05.905330 2195229 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40752 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:12:06.017688 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:12:06.017758 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 11:12:06.045058 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:12:06.045127 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1216 bytes)
	I0916 11:12:06.071367 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:12:06.071438 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 11:12:06.097528 2195229 provision.go:87] duration metric: took 790.765509ms to configureAuth
	I0916 11:12:06.097558 2195229 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:12:06.097789 2195229 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:12:06.097804 2195229 machine.go:96] duration metric: took 4.280906896s to provisionDockerMachine
	I0916 11:12:06.097813 2195229 start.go:293] postStartSetup for "multinode-890146" (driver="docker")
	I0916 11:12:06.097824 2195229 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:12:06.097888 2195229 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:12:06.097934 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:12:06.115027 2195229 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40752 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:12:06.211900 2195229 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:12:06.215093 2195229 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:12:06.215111 2195229 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:12:06.215119 2195229 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:12:06.215124 2195229 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:12:06.215129 2195229 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:12:06.215133 2195229 command_runner.go:130] > ID=ubuntu
	I0916 11:12:06.215137 2195229 command_runner.go:130] > ID_LIKE=debian
	I0916 11:12:06.215142 2195229 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:12:06.215147 2195229 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:12:06.215152 2195229 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:12:06.215159 2195229 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:12:06.215163 2195229 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:12:06.215219 2195229 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:12:06.215248 2195229 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:12:06.215263 2195229 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:12:06.215271 2195229 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:12:06.215284 2195229 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 11:12:06.215342 2195229 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 11:12:06.215429 2195229 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 11:12:06.215440 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 11:12:06.215551 2195229 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:12:06.224254 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:12:06.248637 2195229 start.go:296] duration metric: took 150.808408ms for postStartSetup
	I0916 11:12:06.248731 2195229 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:12:06.248780 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:12:06.265551 2195229 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40752 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:12:06.363064 2195229 command_runner.go:130] > 21%
	I0916 11:12:06.363575 2195229 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:12:06.367697 2195229 command_runner.go:130] > 154G
	I0916 11:12:06.368111 2195229 fix.go:56] duration metric: took 4.950391509s for fixHost
	I0916 11:12:06.368143 2195229 start.go:83] releasing machines lock for "multinode-890146", held for 4.950451537s
	I0916 11:12:06.368213 2195229 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146
	I0916 11:12:06.384396 2195229 ssh_runner.go:195] Run: cat /version.json
	I0916 11:12:06.384451 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:12:06.384452 2195229 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:12:06.384539 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:12:06.401491 2195229 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40752 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:12:06.406786 2195229 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40752 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:12:06.619497 2195229 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:12:06.619579 2195229 command_runner.go:130] > {"iso_version": "v1.34.0-1726281733-19643", "kicbase_version": "v0.0.45-1726358845-19644", "minikube_version": "v1.34.0", "commit": "f890713149c79cf50e25c13e6a5c0470aa0f0450"}
	I0916 11:12:06.619761 2195229 ssh_runner.go:195] Run: systemctl --version
	I0916 11:12:06.623940 2195229 command_runner.go:130] > systemd 249 (249.11-0ubuntu3.12)
	I0916 11:12:06.624034 2195229 command_runner.go:130] > +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified
	I0916 11:12:06.624473 2195229 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:12:06.628383 2195229 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 11:12:06.628424 2195229 command_runner.go:130] >   Size: 78        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:12:06.628432 2195229 command_runner.go:130] > Device: 3ch/60d	Inode: 1324575     Links: 1
	I0916 11:12:06.628439 2195229 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:12:06.628445 2195229 command_runner.go:130] > Access: 2024-09-16 11:12:02.222838018 +0000
	I0916 11:12:06.628451 2195229 command_runner.go:130] > Modify: 2024-09-16 11:09:49.447553385 +0000
	I0916 11:12:06.628456 2195229 command_runner.go:130] > Change: 2024-09-16 11:09:49.447553385 +0000
	I0916 11:12:06.628461 2195229 command_runner.go:130] >  Birth: 2024-09-16 11:09:49.447553385 +0000
	I0916 11:12:06.628695 2195229 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 11:12:06.646999 2195229 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:12:06.647082 2195229 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:12:06.656262 2195229 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 11:12:06.656340 2195229 start.go:495] detecting cgroup driver to use...
	I0916 11:12:06.656382 2195229 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:12:06.656443 2195229 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 11:12:06.670984 2195229 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 11:12:06.683142 2195229 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:12:06.683259 2195229 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:12:06.696950 2195229 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:12:06.708865 2195229 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:12:06.787155 2195229 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:12:06.866667 2195229 docker.go:233] disabling docker service ...
	I0916 11:12:06.866790 2195229 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:12:06.879485 2195229 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:12:06.891075 2195229 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:12:06.995431 2195229 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:12:07.089336 2195229 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:12:07.101309 2195229 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:12:07.116370 2195229 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0916 11:12:07.117751 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 11:12:07.127978 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 11:12:07.137982 2195229 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 11:12:07.138058 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 11:12:07.148139 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:12:07.157974 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 11:12:07.168065 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:12:07.178215 2195229 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:12:07.187958 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 11:12:07.199136 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 11:12:07.209045 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 11:12:07.219281 2195229 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:12:07.226831 2195229 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:12:07.228035 2195229 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:12:07.236518 2195229 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:12:07.317459 2195229 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 11:12:07.476471 2195229 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 11:12:07.476546 2195229 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 11:12:07.480012 2195229 command_runner.go:130] >   File: /run/containerd/containerd.sock
	I0916 11:12:07.480038 2195229 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:12:07.480046 2195229 command_runner.go:130] > Device: 45h/69d	Inode: 160         Links: 1
	I0916 11:12:07.480053 2195229 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:12:07.480059 2195229 command_runner.go:130] > Access: 2024-09-16 11:12:07.390809834 +0000
	I0916 11:12:07.480069 2195229 command_runner.go:130] > Modify: 2024-09-16 11:12:07.390809834 +0000
	I0916 11:12:07.480075 2195229 command_runner.go:130] > Change: 2024-09-16 11:12:07.390809834 +0000
	I0916 11:12:07.480079 2195229 command_runner.go:130] >  Birth: -
	I0916 11:12:07.480478 2195229 start.go:563] Will wait 60s for crictl version
	I0916 11:12:07.480534 2195229 ssh_runner.go:195] Run: which crictl
	I0916 11:12:07.483675 2195229 command_runner.go:130] > /usr/bin/crictl
	I0916 11:12:07.484115 2195229 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:12:07.523781 2195229 command_runner.go:130] > Version:  0.1.0
	I0916 11:12:07.523804 2195229 command_runner.go:130] > RuntimeName:  containerd
	I0916 11:12:07.523810 2195229 command_runner.go:130] > RuntimeVersion:  1.7.22
	I0916 11:12:07.523814 2195229 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:12:07.526396 2195229 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 11:12:07.526483 2195229 ssh_runner.go:195] Run: containerd --version
	I0916 11:12:07.547490 2195229 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:12:07.549475 2195229 ssh_runner.go:195] Run: containerd --version
	I0916 11:12:07.571397 2195229 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:12:07.574823 2195229 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 11:12:07.576823 2195229 cli_runner.go:164] Run: docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:12:07.592727 2195229 ssh_runner.go:195] Run: grep 192.168.58.1	host.minikube.internal$ /etc/hosts
	I0916 11:12:07.596585 2195229 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.58.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:12:07.607561 2195229 kubeadm.go:883] updating cluster {Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APISe
rverNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-ins
taller:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:12:07.607714 2195229 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:12:07.607776 2195229 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:12:07.642745 2195229 command_runner.go:130] > {
	I0916 11:12:07.642768 2195229 command_runner.go:130] >   "images": [
	I0916 11:12:07.642773 2195229 command_runner.go:130] >     {
	I0916 11:12:07.642783 2195229 command_runner.go:130] >       "id": "sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:12:07.642788 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.642801 2195229 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:12:07.642804 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.642808 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.642817 2195229 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:12:07.642825 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.642829 2195229 command_runner.go:130] >       "size": "33309097",
	I0916 11:12:07.642836 2195229 command_runner.go:130] >       "uid": null,
	I0916 11:12:07.642839 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.642843 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.642852 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.642856 2195229 command_runner.go:130] >     },
	I0916 11:12:07.642859 2195229 command_runner.go:130] >     {
	I0916 11:12:07.642869 2195229 command_runner.go:130] >       "id": "sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd",
	I0916 11:12:07.642876 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.642881 2195229 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox:1.28"
	I0916 11:12:07.642884 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.642888 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.642897 2195229 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12"
	I0916 11:12:07.642901 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.642907 2195229 command_runner.go:130] >       "size": "764554",
	I0916 11:12:07.642911 2195229 command_runner.go:130] >       "uid": null,
	I0916 11:12:07.642915 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.642918 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.642925 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.642928 2195229 command_runner.go:130] >     },
	I0916 11:12:07.642933 2195229 command_runner.go:130] >     {
	I0916 11:12:07.642940 2195229 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:12:07.642946 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.642956 2195229 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:12:07.642959 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.642963 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.642974 2195229 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:12:07.642979 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.642983 2195229 command_runner.go:130] >       "size": "8034419",
	I0916 11:12:07.642986 2195229 command_runner.go:130] >       "uid": null,
	I0916 11:12:07.642990 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.642993 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.642997 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.643000 2195229 command_runner.go:130] >     },
	I0916 11:12:07.643006 2195229 command_runner.go:130] >     {
	I0916 11:12:07.643013 2195229 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:12:07.643019 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.643025 2195229 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:12:07.643028 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643032 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.643041 2195229 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:12:07.643047 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643051 2195229 command_runner.go:130] >       "size": "16948420",
	I0916 11:12:07.643055 2195229 command_runner.go:130] >       "uid": null,
	I0916 11:12:07.643059 2195229 command_runner.go:130] >       "username": "nonroot",
	I0916 11:12:07.643065 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.643068 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.643071 2195229 command_runner.go:130] >     },
	I0916 11:12:07.643074 2195229 command_runner.go:130] >     {
	I0916 11:12:07.643081 2195229 command_runner.go:130] >       "id": "sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:12:07.643088 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.643093 2195229 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:12:07.643098 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643101 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.643111 2195229 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a"
	I0916 11:12:07.643124 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643128 2195229 command_runner.go:130] >       "size": "66535646",
	I0916 11:12:07.643131 2195229 command_runner.go:130] >       "uid": {
	I0916 11:12:07.643135 2195229 command_runner.go:130] >         "value": "0"
	I0916 11:12:07.643140 2195229 command_runner.go:130] >       },
	I0916 11:12:07.643144 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.643148 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.643155 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.643158 2195229 command_runner.go:130] >     },
	I0916 11:12:07.643161 2195229 command_runner.go:130] >     {
	I0916 11:12:07.643168 2195229 command_runner.go:130] >       "id": "sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:12:07.643174 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.643179 2195229 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:12:07.643191 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643195 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.643203 2195229 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb"
	I0916 11:12:07.643212 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643216 2195229 command_runner.go:130] >       "size": "25687130",
	I0916 11:12:07.643219 2195229 command_runner.go:130] >       "uid": {
	I0916 11:12:07.643229 2195229 command_runner.go:130] >         "value": "0"
	I0916 11:12:07.643232 2195229 command_runner.go:130] >       },
	I0916 11:12:07.643236 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.643239 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.643243 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.643246 2195229 command_runner.go:130] >     },
	I0916 11:12:07.643249 2195229 command_runner.go:130] >     {
	I0916 11:12:07.643256 2195229 command_runner.go:130] >       "id": "sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:12:07.643262 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.643267 2195229 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:12:07.643277 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643281 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.643289 2195229 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1"
	I0916 11:12:07.643295 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643298 2195229 command_runner.go:130] >       "size": "23948670",
	I0916 11:12:07.643302 2195229 command_runner.go:130] >       "uid": {
	I0916 11:12:07.643305 2195229 command_runner.go:130] >         "value": "0"
	I0916 11:12:07.643309 2195229 command_runner.go:130] >       },
	I0916 11:12:07.643313 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.643317 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.643323 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.643326 2195229 command_runner.go:130] >     },
	I0916 11:12:07.643329 2195229 command_runner.go:130] >     {
	I0916 11:12:07.643336 2195229 command_runner.go:130] >       "id": "sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:12:07.643342 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.643347 2195229 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:12:07.643356 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643360 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.643368 2195229 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44"
	I0916 11:12:07.643377 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643381 2195229 command_runner.go:130] >       "size": "26756812",
	I0916 11:12:07.643386 2195229 command_runner.go:130] >       "uid": null,
	I0916 11:12:07.643392 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.643396 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.643399 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.643402 2195229 command_runner.go:130] >     },
	I0916 11:12:07.643405 2195229 command_runner.go:130] >     {
	I0916 11:12:07.643412 2195229 command_runner.go:130] >       "id": "sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:12:07.643418 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.643423 2195229 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:12:07.643428 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643432 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.643440 2195229 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:12:07.643446 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643450 2195229 command_runner.go:130] >       "size": "18507674",
	I0916 11:12:07.643453 2195229 command_runner.go:130] >       "uid": {
	I0916 11:12:07.643457 2195229 command_runner.go:130] >         "value": "0"
	I0916 11:12:07.643460 2195229 command_runner.go:130] >       },
	I0916 11:12:07.643466 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.643469 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.643479 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.643482 2195229 command_runner.go:130] >     },
	I0916 11:12:07.643485 2195229 command_runner.go:130] >     {
	I0916 11:12:07.643497 2195229 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:12:07.643501 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.643505 2195229 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:12:07.643511 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643515 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.643524 2195229 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:12:07.643535 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.643538 2195229 command_runner.go:130] >       "size": "267933",
	I0916 11:12:07.643542 2195229 command_runner.go:130] >       "uid": {
	I0916 11:12:07.643556 2195229 command_runner.go:130] >         "value": "65535"
	I0916 11:12:07.643563 2195229 command_runner.go:130] >       },
	I0916 11:12:07.643567 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.643571 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.643574 2195229 command_runner.go:130] >       "pinned": true
	I0916 11:12:07.643577 2195229 command_runner.go:130] >     }
	I0916 11:12:07.643580 2195229 command_runner.go:130] >   ]
	I0916 11:12:07.643583 2195229 command_runner.go:130] > }
	I0916 11:12:07.646498 2195229 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 11:12:07.646519 2195229 containerd.go:534] Images already preloaded, skipping extraction
	I0916 11:12:07.646581 2195229 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:12:07.683004 2195229 command_runner.go:130] > {
	I0916 11:12:07.683025 2195229 command_runner.go:130] >   "images": [
	I0916 11:12:07.683029 2195229 command_runner.go:130] >     {
	I0916 11:12:07.683038 2195229 command_runner.go:130] >       "id": "sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51",
	I0916 11:12:07.683043 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.683049 2195229 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20240813-c6f155d6"
	I0916 11:12:07.683052 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683056 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.683065 2195229 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"
	I0916 11:12:07.683068 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683073 2195229 command_runner.go:130] >       "size": "33309097",
	I0916 11:12:07.683077 2195229 command_runner.go:130] >       "uid": null,
	I0916 11:12:07.683085 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.683088 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.683092 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.683096 2195229 command_runner.go:130] >     },
	I0916 11:12:07.683103 2195229 command_runner.go:130] >     {
	I0916 11:12:07.683111 2195229 command_runner.go:130] >       "id": "sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd",
	I0916 11:12:07.683118 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.683123 2195229 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox:1.28"
	I0916 11:12:07.683126 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683130 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.683138 2195229 command_runner.go:130] >         "gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12"
	I0916 11:12:07.683145 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683151 2195229 command_runner.go:130] >       "size": "764554",
	I0916 11:12:07.683156 2195229 command_runner.go:130] >       "uid": null,
	I0916 11:12:07.683160 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.683164 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.683168 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.683173 2195229 command_runner.go:130] >     },
	I0916 11:12:07.683176 2195229 command_runner.go:130] >     {
	I0916 11:12:07.683183 2195229 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0916 11:12:07.683187 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.683197 2195229 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0916 11:12:07.683207 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683211 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.683219 2195229 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0916 11:12:07.683228 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683241 2195229 command_runner.go:130] >       "size": "8034419",
	I0916 11:12:07.683248 2195229 command_runner.go:130] >       "uid": null,
	I0916 11:12:07.683252 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.683256 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.683260 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.683263 2195229 command_runner.go:130] >     },
	I0916 11:12:07.683266 2195229 command_runner.go:130] >     {
	I0916 11:12:07.683272 2195229 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0916 11:12:07.683278 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.683283 2195229 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0916 11:12:07.683288 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683292 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.683300 2195229 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0916 11:12:07.683305 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683309 2195229 command_runner.go:130] >       "size": "16948420",
	I0916 11:12:07.683312 2195229 command_runner.go:130] >       "uid": null,
	I0916 11:12:07.683316 2195229 command_runner.go:130] >       "username": "nonroot",
	I0916 11:12:07.683320 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.683326 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.683329 2195229 command_runner.go:130] >     },
	I0916 11:12:07.683336 2195229 command_runner.go:130] >     {
	I0916 11:12:07.683343 2195229 command_runner.go:130] >       "id": "sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da",
	I0916 11:12:07.683346 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.683351 2195229 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.15-0"
	I0916 11:12:07.683357 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683361 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.683370 2195229 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a"
	I0916 11:12:07.683375 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683387 2195229 command_runner.go:130] >       "size": "66535646",
	I0916 11:12:07.683390 2195229 command_runner.go:130] >       "uid": {
	I0916 11:12:07.683395 2195229 command_runner.go:130] >         "value": "0"
	I0916 11:12:07.683399 2195229 command_runner.go:130] >       },
	I0916 11:12:07.683402 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.683408 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.683412 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.683414 2195229 command_runner.go:130] >     },
	I0916 11:12:07.683417 2195229 command_runner.go:130] >     {
	I0916 11:12:07.683424 2195229 command_runner.go:130] >       "id": "sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853",
	I0916 11:12:07.683430 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.683435 2195229 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.31.1"
	I0916 11:12:07.683440 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683444 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.683453 2195229 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb"
	I0916 11:12:07.683457 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683461 2195229 command_runner.go:130] >       "size": "25687130",
	I0916 11:12:07.683464 2195229 command_runner.go:130] >       "uid": {
	I0916 11:12:07.683467 2195229 command_runner.go:130] >         "value": "0"
	I0916 11:12:07.683470 2195229 command_runner.go:130] >       },
	I0916 11:12:07.683474 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.683480 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.683483 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.683486 2195229 command_runner.go:130] >     },
	I0916 11:12:07.683489 2195229 command_runner.go:130] >     {
	I0916 11:12:07.683497 2195229 command_runner.go:130] >       "id": "sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e",
	I0916 11:12:07.683500 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.683506 2195229 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.31.1"
	I0916 11:12:07.683509 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683512 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.683520 2195229 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1"
	I0916 11:12:07.683523 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683528 2195229 command_runner.go:130] >       "size": "23948670",
	I0916 11:12:07.683534 2195229 command_runner.go:130] >       "uid": {
	I0916 11:12:07.683537 2195229 command_runner.go:130] >         "value": "0"
	I0916 11:12:07.683540 2195229 command_runner.go:130] >       },
	I0916 11:12:07.683544 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.683550 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.683554 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.683559 2195229 command_runner.go:130] >     },
	I0916 11:12:07.683562 2195229 command_runner.go:130] >     {
	I0916 11:12:07.683568 2195229 command_runner.go:130] >       "id": "sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d",
	I0916 11:12:07.683575 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.683579 2195229 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.31.1"
	I0916 11:12:07.683593 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683596 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.683604 2195229 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44"
	I0916 11:12:07.683607 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683611 2195229 command_runner.go:130] >       "size": "26756812",
	I0916 11:12:07.683615 2195229 command_runner.go:130] >       "uid": null,
	I0916 11:12:07.683619 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.683625 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.683629 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.683632 2195229 command_runner.go:130] >     },
	I0916 11:12:07.683635 2195229 command_runner.go:130] >     {
	I0916 11:12:07.683642 2195229 command_runner.go:130] >       "id": "sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d",
	I0916 11:12:07.683648 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.683653 2195229 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.31.1"
	I0916 11:12:07.683658 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683663 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.683680 2195229 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"
	I0916 11:12:07.683683 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683687 2195229 command_runner.go:130] >       "size": "18507674",
	I0916 11:12:07.683690 2195229 command_runner.go:130] >       "uid": {
	I0916 11:12:07.683694 2195229 command_runner.go:130] >         "value": "0"
	I0916 11:12:07.683699 2195229 command_runner.go:130] >       },
	I0916 11:12:07.683702 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.683706 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.683709 2195229 command_runner.go:130] >       "pinned": false
	I0916 11:12:07.683714 2195229 command_runner.go:130] >     },
	I0916 11:12:07.683720 2195229 command_runner.go:130] >     {
	I0916 11:12:07.683731 2195229 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0916 11:12:07.683739 2195229 command_runner.go:130] >       "repoTags": [
	I0916 11:12:07.683743 2195229 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0916 11:12:07.683746 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683750 2195229 command_runner.go:130] >       "repoDigests": [
	I0916 11:12:07.683758 2195229 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0916 11:12:07.683764 2195229 command_runner.go:130] >       ],
	I0916 11:12:07.683768 2195229 command_runner.go:130] >       "size": "267933",
	I0916 11:12:07.683771 2195229 command_runner.go:130] >       "uid": {
	I0916 11:12:07.683775 2195229 command_runner.go:130] >         "value": "65535"
	I0916 11:12:07.683782 2195229 command_runner.go:130] >       },
	I0916 11:12:07.683786 2195229 command_runner.go:130] >       "username": "",
	I0916 11:12:07.683790 2195229 command_runner.go:130] >       "spec": null,
	I0916 11:12:07.683793 2195229 command_runner.go:130] >       "pinned": true
	I0916 11:12:07.683796 2195229 command_runner.go:130] >     }
	I0916 11:12:07.683802 2195229 command_runner.go:130] >   ]
	I0916 11:12:07.683807 2195229 command_runner.go:130] > }
	I0916 11:12:07.683931 2195229 containerd.go:627] all images are preloaded for containerd runtime.
	I0916 11:12:07.683943 2195229 cache_images.go:84] Images are preloaded, skipping loading
	I0916 11:12:07.683951 2195229 kubeadm.go:934] updating node { 192.168.58.2 8443 v1.31.1 containerd true true} ...
	I0916 11:12:07.684055 2195229 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-890146 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.58.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:12:07.684127 2195229 ssh_runner.go:195] Run: sudo crictl info
	I0916 11:12:07.716350 2195229 command_runner.go:130] > {
	I0916 11:12:07.716375 2195229 command_runner.go:130] >   "status": {
	I0916 11:12:07.716380 2195229 command_runner.go:130] >     "conditions": [
	I0916 11:12:07.716387 2195229 command_runner.go:130] >       {
	I0916 11:12:07.716393 2195229 command_runner.go:130] >         "type": "RuntimeReady",
	I0916 11:12:07.716398 2195229 command_runner.go:130] >         "status": true,
	I0916 11:12:07.716402 2195229 command_runner.go:130] >         "reason": "",
	I0916 11:12:07.716410 2195229 command_runner.go:130] >         "message": ""
	I0916 11:12:07.716414 2195229 command_runner.go:130] >       },
	I0916 11:12:07.716417 2195229 command_runner.go:130] >       {
	I0916 11:12:07.716422 2195229 command_runner.go:130] >         "type": "NetworkReady",
	I0916 11:12:07.716426 2195229 command_runner.go:130] >         "status": true,
	I0916 11:12:07.716434 2195229 command_runner.go:130] >         "reason": "",
	I0916 11:12:07.716443 2195229 command_runner.go:130] >         "message": ""
	I0916 11:12:07.716450 2195229 command_runner.go:130] >       },
	I0916 11:12:07.716453 2195229 command_runner.go:130] >       {
	I0916 11:12:07.716458 2195229 command_runner.go:130] >         "type": "ContainerdHasNoDeprecationWarnings",
	I0916 11:12:07.716462 2195229 command_runner.go:130] >         "status": true,
	I0916 11:12:07.716468 2195229 command_runner.go:130] >         "reason": "",
	I0916 11:12:07.716475 2195229 command_runner.go:130] >         "message": ""
	I0916 11:12:07.716481 2195229 command_runner.go:130] >       }
	I0916 11:12:07.716485 2195229 command_runner.go:130] >     ]
	I0916 11:12:07.716489 2195229 command_runner.go:130] >   },
	I0916 11:12:07.716493 2195229 command_runner.go:130] >   "cniconfig": {
	I0916 11:12:07.716498 2195229 command_runner.go:130] >     "PluginDirs": [
	I0916 11:12:07.716502 2195229 command_runner.go:130] >       "/opt/cni/bin"
	I0916 11:12:07.716505 2195229 command_runner.go:130] >     ],
	I0916 11:12:07.716512 2195229 command_runner.go:130] >     "PluginConfDir": "/etc/cni/net.d",
	I0916 11:12:07.716521 2195229 command_runner.go:130] >     "PluginMaxConfNum": 1,
	I0916 11:12:07.716525 2195229 command_runner.go:130] >     "Prefix": "eth",
	I0916 11:12:07.716529 2195229 command_runner.go:130] >     "Networks": [
	I0916 11:12:07.716532 2195229 command_runner.go:130] >       {
	I0916 11:12:07.716538 2195229 command_runner.go:130] >         "Config": {
	I0916 11:12:07.716543 2195229 command_runner.go:130] >           "Name": "cni-loopback",
	I0916 11:12:07.716550 2195229 command_runner.go:130] >           "CNIVersion": "0.3.1",
	I0916 11:12:07.716554 2195229 command_runner.go:130] >           "Plugins": [
	I0916 11:12:07.716557 2195229 command_runner.go:130] >             {
	I0916 11:12:07.716561 2195229 command_runner.go:130] >               "Network": {
	I0916 11:12:07.716572 2195229 command_runner.go:130] >                 "type": "loopback",
	I0916 11:12:07.716576 2195229 command_runner.go:130] >                 "ipam": {},
	I0916 11:12:07.716581 2195229 command_runner.go:130] >                 "dns": {}
	I0916 11:12:07.716589 2195229 command_runner.go:130] >               },
	I0916 11:12:07.716594 2195229 command_runner.go:130] >               "Source": "{\"type\":\"loopback\"}"
	I0916 11:12:07.716598 2195229 command_runner.go:130] >             }
	I0916 11:12:07.716603 2195229 command_runner.go:130] >           ],
	I0916 11:12:07.716614 2195229 command_runner.go:130] >           "Source": "{\n\"cniVersion\": \"0.3.1\",\n\"name\": \"cni-loopback\",\n\"plugins\": [{\n  \"type\": \"loopback\"\n}]\n}"
	I0916 11:12:07.716621 2195229 command_runner.go:130] >         },
	I0916 11:12:07.716624 2195229 command_runner.go:130] >         "IFName": "lo"
	I0916 11:12:07.716628 2195229 command_runner.go:130] >       },
	I0916 11:12:07.716631 2195229 command_runner.go:130] >       {
	I0916 11:12:07.716635 2195229 command_runner.go:130] >         "Config": {
	I0916 11:12:07.716641 2195229 command_runner.go:130] >           "Name": "kindnet",
	I0916 11:12:07.716646 2195229 command_runner.go:130] >           "CNIVersion": "0.3.1",
	I0916 11:12:07.716655 2195229 command_runner.go:130] >           "Plugins": [
	I0916 11:12:07.716659 2195229 command_runner.go:130] >             {
	I0916 11:12:07.716678 2195229 command_runner.go:130] >               "Network": {
	I0916 11:12:07.716687 2195229 command_runner.go:130] >                 "type": "ptp",
	I0916 11:12:07.716691 2195229 command_runner.go:130] >                 "ipam": {
	I0916 11:12:07.716696 2195229 command_runner.go:130] >                   "type": "host-local"
	I0916 11:12:07.716702 2195229 command_runner.go:130] >                 },
	I0916 11:12:07.716707 2195229 command_runner.go:130] >                 "dns": {}
	I0916 11:12:07.716712 2195229 command_runner.go:130] >               },
	I0916 11:12:07.716726 2195229 command_runner.go:130] >               "Source": "{\"ipMasq\":false,\"ipam\":{\"dataDir\":\"/run/cni-ipam-state\",\"ranges\":[[{\"subnet\":\"10.244.0.0/24\"}]],\"routes\":[{\"dst\":\"0.0.0.0/0\"}],\"type\":\"host-local\"},\"mtu\":1500,\"type\":\"ptp\"}"
	I0916 11:12:07.716733 2195229 command_runner.go:130] >             },
	I0916 11:12:07.716738 2195229 command_runner.go:130] >             {
	I0916 11:12:07.716742 2195229 command_runner.go:130] >               "Network": {
	I0916 11:12:07.716752 2195229 command_runner.go:130] >                 "type": "portmap",
	I0916 11:12:07.716757 2195229 command_runner.go:130] >                 "capabilities": {
	I0916 11:12:07.716764 2195229 command_runner.go:130] >                   "portMappings": true
	I0916 11:12:07.716770 2195229 command_runner.go:130] >                 },
	I0916 11:12:07.716774 2195229 command_runner.go:130] >                 "ipam": {},
	I0916 11:12:07.716778 2195229 command_runner.go:130] >                 "dns": {}
	I0916 11:12:07.716781 2195229 command_runner.go:130] >               },
	I0916 11:12:07.716792 2195229 command_runner.go:130] >               "Source": "{\"capabilities\":{\"portMappings\":true},\"type\":\"portmap\"}"
	I0916 11:12:07.716797 2195229 command_runner.go:130] >             }
	I0916 11:12:07.716801 2195229 command_runner.go:130] >           ],
	I0916 11:12:07.716834 2195229 command_runner.go:130] >           "Source": "\n{\n\t\"cniVersion\": \"0.3.1\",\n\t\"name\": \"kindnet\",\n\t\"plugins\": [\n\t{\n\t\t\"type\": \"ptp\",\n\t\t\"ipMasq\": false,\n\t\t\"ipam\": {\n\t\t\t\"type\": \"host-local\",\n\t\t\t\"dataDir\": \"/run/cni-ipam-state\",\n\t\t\t\"routes\": [\n\t\t\t\t\n\t\t\t\t\n\t\t\t\t{ \"dst\": \"0.0.0.0/0\" }\n\t\t\t],\n\t\t\t\"ranges\": [\n\t\t\t\t\n\t\t\t\t\n\t\t\t\t[ { \"subnet\": \"10.244.0.0/24\" } ]\n\t\t\t]\n\t\t}\n\t\t,\n\t\t\"mtu\": 1500\n\t\t\n\t},\n\t{\n\t\t\"type\": \"portmap\",\n\t\t\"capabilities\": {\n\t\t\t\"portMappings\": true\n\t\t}\n\t}\n\t]\n}\n"
	I0916 11:12:07.716849 2195229 command_runner.go:130] >         },
	I0916 11:12:07.716854 2195229 command_runner.go:130] >         "IFName": "eth0"
	I0916 11:12:07.716857 2195229 command_runner.go:130] >       }
	I0916 11:12:07.716860 2195229 command_runner.go:130] >     ]
	I0916 11:12:07.716863 2195229 command_runner.go:130] >   },
	I0916 11:12:07.716867 2195229 command_runner.go:130] >   "config": {
	I0916 11:12:07.716873 2195229 command_runner.go:130] >     "containerd": {
	I0916 11:12:07.716878 2195229 command_runner.go:130] >       "snapshotter": "overlayfs",
	I0916 11:12:07.716885 2195229 command_runner.go:130] >       "defaultRuntimeName": "runc",
	I0916 11:12:07.716890 2195229 command_runner.go:130] >       "defaultRuntime": {
	I0916 11:12:07.716893 2195229 command_runner.go:130] >         "runtimeType": "",
	I0916 11:12:07.716898 2195229 command_runner.go:130] >         "runtimePath": "",
	I0916 11:12:07.716902 2195229 command_runner.go:130] >         "runtimeEngine": "",
	I0916 11:12:07.716909 2195229 command_runner.go:130] >         "PodAnnotations": null,
	I0916 11:12:07.716913 2195229 command_runner.go:130] >         "ContainerAnnotations": null,
	I0916 11:12:07.716923 2195229 command_runner.go:130] >         "runtimeRoot": "",
	I0916 11:12:07.716927 2195229 command_runner.go:130] >         "options": null,
	I0916 11:12:07.716932 2195229 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0916 11:12:07.716940 2195229 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0916 11:12:07.716944 2195229 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0916 11:12:07.716948 2195229 command_runner.go:130] >         "cniConfDir": "",
	I0916 11:12:07.716953 2195229 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0916 11:12:07.716958 2195229 command_runner.go:130] >         "snapshotter": "",
	I0916 11:12:07.716964 2195229 command_runner.go:130] >         "sandboxMode": ""
	I0916 11:12:07.716971 2195229 command_runner.go:130] >       },
	I0916 11:12:07.716975 2195229 command_runner.go:130] >       "untrustedWorkloadRuntime": {
	I0916 11:12:07.716979 2195229 command_runner.go:130] >         "runtimeType": "",
	I0916 11:12:07.716983 2195229 command_runner.go:130] >         "runtimePath": "",
	I0916 11:12:07.716989 2195229 command_runner.go:130] >         "runtimeEngine": "",
	I0916 11:12:07.716994 2195229 command_runner.go:130] >         "PodAnnotations": null,
	I0916 11:12:07.717000 2195229 command_runner.go:130] >         "ContainerAnnotations": null,
	I0916 11:12:07.717004 2195229 command_runner.go:130] >         "runtimeRoot": "",
	I0916 11:12:07.717014 2195229 command_runner.go:130] >         "options": null,
	I0916 11:12:07.717019 2195229 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0916 11:12:07.717027 2195229 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0916 11:12:07.717035 2195229 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0916 11:12:07.717040 2195229 command_runner.go:130] >         "cniConfDir": "",
	I0916 11:12:07.717044 2195229 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0916 11:12:07.717054 2195229 command_runner.go:130] >         "snapshotter": "",
	I0916 11:12:07.717058 2195229 command_runner.go:130] >         "sandboxMode": ""
	I0916 11:12:07.717061 2195229 command_runner.go:130] >       },
	I0916 11:12:07.717065 2195229 command_runner.go:130] >       "runtimes": {
	I0916 11:12:07.717069 2195229 command_runner.go:130] >         "runc": {
	I0916 11:12:07.717074 2195229 command_runner.go:130] >           "runtimeType": "io.containerd.runc.v2",
	I0916 11:12:07.717081 2195229 command_runner.go:130] >           "runtimePath": "",
	I0916 11:12:07.717085 2195229 command_runner.go:130] >           "runtimeEngine": "",
	I0916 11:12:07.717092 2195229 command_runner.go:130] >           "PodAnnotations": null,
	I0916 11:12:07.717102 2195229 command_runner.go:130] >           "ContainerAnnotations": null,
	I0916 11:12:07.717107 2195229 command_runner.go:130] >           "runtimeRoot": "",
	I0916 11:12:07.717111 2195229 command_runner.go:130] >           "options": {
	I0916 11:12:07.717117 2195229 command_runner.go:130] >             "SystemdCgroup": false
	I0916 11:12:07.717121 2195229 command_runner.go:130] >           },
	I0916 11:12:07.717133 2195229 command_runner.go:130] >           "privileged_without_host_devices": false,
	I0916 11:12:07.717142 2195229 command_runner.go:130] >           "privileged_without_host_devices_all_devices_allowed": false,
	I0916 11:12:07.717147 2195229 command_runner.go:130] >           "baseRuntimeSpec": "",
	I0916 11:12:07.717153 2195229 command_runner.go:130] >           "cniConfDir": "",
	I0916 11:12:07.717157 2195229 command_runner.go:130] >           "cniMaxConfNum": 0,
	I0916 11:12:07.717162 2195229 command_runner.go:130] >           "snapshotter": "",
	I0916 11:12:07.717166 2195229 command_runner.go:130] >           "sandboxMode": "podsandbox"
	I0916 11:12:07.717170 2195229 command_runner.go:130] >         }
	I0916 11:12:07.717173 2195229 command_runner.go:130] >       },
	I0916 11:12:07.717180 2195229 command_runner.go:130] >       "noPivot": false,
	I0916 11:12:07.717185 2195229 command_runner.go:130] >       "disableSnapshotAnnotations": true,
	I0916 11:12:07.717192 2195229 command_runner.go:130] >       "discardUnpackedLayers": true,
	I0916 11:12:07.717198 2195229 command_runner.go:130] >       "ignoreBlockIONotEnabledErrors": false,
	I0916 11:12:07.717202 2195229 command_runner.go:130] >       "ignoreRdtNotEnabledErrors": false
	I0916 11:12:07.717208 2195229 command_runner.go:130] >     },
	I0916 11:12:07.717211 2195229 command_runner.go:130] >     "cni": {
	I0916 11:12:07.717215 2195229 command_runner.go:130] >       "binDir": "/opt/cni/bin",
	I0916 11:12:07.717220 2195229 command_runner.go:130] >       "confDir": "/etc/cni/net.d",
	I0916 11:12:07.717229 2195229 command_runner.go:130] >       "maxConfNum": 1,
	I0916 11:12:07.717232 2195229 command_runner.go:130] >       "setupSerially": false,
	I0916 11:12:07.717237 2195229 command_runner.go:130] >       "confTemplate": "",
	I0916 11:12:07.717246 2195229 command_runner.go:130] >       "ipPref": ""
	I0916 11:12:07.717249 2195229 command_runner.go:130] >     },
	I0916 11:12:07.717252 2195229 command_runner.go:130] >     "registry": {
	I0916 11:12:07.717257 2195229 command_runner.go:130] >       "configPath": "/etc/containerd/certs.d",
	I0916 11:12:07.717262 2195229 command_runner.go:130] >       "mirrors": null,
	I0916 11:12:07.717268 2195229 command_runner.go:130] >       "configs": null,
	I0916 11:12:07.717272 2195229 command_runner.go:130] >       "auths": null,
	I0916 11:12:07.717276 2195229 command_runner.go:130] >       "headers": null
	I0916 11:12:07.717285 2195229 command_runner.go:130] >     },
	I0916 11:12:07.717288 2195229 command_runner.go:130] >     "imageDecryption": {
	I0916 11:12:07.717295 2195229 command_runner.go:130] >       "keyModel": "node"
	I0916 11:12:07.717301 2195229 command_runner.go:130] >     },
	I0916 11:12:07.717305 2195229 command_runner.go:130] >     "disableTCPService": true,
	I0916 11:12:07.717309 2195229 command_runner.go:130] >     "streamServerAddress": "",
	I0916 11:12:07.717320 2195229 command_runner.go:130] >     "streamServerPort": "10010",
	I0916 11:12:07.717324 2195229 command_runner.go:130] >     "streamIdleTimeout": "4h0m0s",
	I0916 11:12:07.717328 2195229 command_runner.go:130] >     "enableSelinux": false,
	I0916 11:12:07.717337 2195229 command_runner.go:130] >     "selinuxCategoryRange": 1024,
	I0916 11:12:07.717342 2195229 command_runner.go:130] >     "sandboxImage": "registry.k8s.io/pause:3.10",
	I0916 11:12:07.717346 2195229 command_runner.go:130] >     "statsCollectPeriod": 10,
	I0916 11:12:07.717350 2195229 command_runner.go:130] >     "systemdCgroup": false,
	I0916 11:12:07.717357 2195229 command_runner.go:130] >     "enableTLSStreaming": false,
	I0916 11:12:07.717361 2195229 command_runner.go:130] >     "x509KeyPairStreaming": {
	I0916 11:12:07.717368 2195229 command_runner.go:130] >       "tlsCertFile": "",
	I0916 11:12:07.717372 2195229 command_runner.go:130] >       "tlsKeyFile": ""
	I0916 11:12:07.717377 2195229 command_runner.go:130] >     },
	I0916 11:12:07.717382 2195229 command_runner.go:130] >     "maxContainerLogSize": 16384,
	I0916 11:12:07.717388 2195229 command_runner.go:130] >     "disableCgroup": false,
	I0916 11:12:07.717392 2195229 command_runner.go:130] >     "disableApparmor": false,
	I0916 11:12:07.717397 2195229 command_runner.go:130] >     "restrictOOMScoreAdj": false,
	I0916 11:12:07.717403 2195229 command_runner.go:130] >     "maxConcurrentDownloads": 3,
	I0916 11:12:07.717408 2195229 command_runner.go:130] >     "disableProcMount": false,
	I0916 11:12:07.717414 2195229 command_runner.go:130] >     "unsetSeccompProfile": "",
	I0916 11:12:07.717419 2195229 command_runner.go:130] >     "tolerateMissingHugetlbController": true,
	I0916 11:12:07.717424 2195229 command_runner.go:130] >     "disableHugetlbController": true,
	I0916 11:12:07.717430 2195229 command_runner.go:130] >     "device_ownership_from_security_context": false,
	I0916 11:12:07.717436 2195229 command_runner.go:130] >     "ignoreImageDefinedVolumes": false,
	I0916 11:12:07.717442 2195229 command_runner.go:130] >     "netnsMountsUnderStateDir": false,
	I0916 11:12:07.717449 2195229 command_runner.go:130] >     "enableUnprivilegedPorts": true,
	I0916 11:12:07.717453 2195229 command_runner.go:130] >     "enableUnprivilegedICMP": false,
	I0916 11:12:07.717459 2195229 command_runner.go:130] >     "enableCDI": false,
	I0916 11:12:07.717463 2195229 command_runner.go:130] >     "cdiSpecDirs": [
	I0916 11:12:07.717466 2195229 command_runner.go:130] >       "/etc/cdi",
	I0916 11:12:07.717470 2195229 command_runner.go:130] >       "/var/run/cdi"
	I0916 11:12:07.717475 2195229 command_runner.go:130] >     ],
	I0916 11:12:07.717479 2195229 command_runner.go:130] >     "imagePullProgressTimeout": "5m0s",
	I0916 11:12:07.717490 2195229 command_runner.go:130] >     "drainExecSyncIOTimeout": "0s",
	I0916 11:12:07.717494 2195229 command_runner.go:130] >     "imagePullWithSyncFs": false,
	I0916 11:12:07.717499 2195229 command_runner.go:130] >     "ignoreDeprecationWarnings": null,
	I0916 11:12:07.717507 2195229 command_runner.go:130] >     "containerdRootDir": "/var/lib/containerd",
	I0916 11:12:07.717512 2195229 command_runner.go:130] >     "containerdEndpoint": "/run/containerd/containerd.sock",
	I0916 11:12:07.717518 2195229 command_runner.go:130] >     "rootDir": "/var/lib/containerd/io.containerd.grpc.v1.cri",
	I0916 11:12:07.717524 2195229 command_runner.go:130] >     "stateDir": "/run/containerd/io.containerd.grpc.v1.cri"
	I0916 11:12:07.717529 2195229 command_runner.go:130] >   },
	I0916 11:12:07.717533 2195229 command_runner.go:130] >   "golang": "go1.22.7",
	I0916 11:12:07.717539 2195229 command_runner.go:130] >   "lastCNILoadStatus": "OK",
	I0916 11:12:07.717544 2195229 command_runner.go:130] >   "lastCNILoadStatus.default": "OK"
	I0916 11:12:07.717551 2195229 command_runner.go:130] > }
	I0916 11:12:07.720650 2195229 cni.go:84] Creating CNI manager for ""
	I0916 11:12:07.720685 2195229 cni.go:136] multinode detected (2 nodes found), recommending kindnet
	I0916 11:12:07.720696 2195229 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:12:07.720739 2195229 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.58.2 APIServerPort:8443 KubernetesVersion:v1.31.1 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-890146 NodeName:multinode-890146 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.58.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.58.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPat
h:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:12:07.720898 2195229 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.58.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "multinode-890146"
	  kubeletExtraArgs:
	    node-ip: 192.168.58.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.58.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.31.1
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 11:12:07.720974 2195229 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:12:07.729186 2195229 command_runner.go:130] > kubeadm
	I0916 11:12:07.729216 2195229 command_runner.go:130] > kubectl
	I0916 11:12:07.729229 2195229 command_runner.go:130] > kubelet
	I0916 11:12:07.730366 2195229 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:12:07.730436 2195229 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0916 11:12:07.739556 2195229 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (320 bytes)
	I0916 11:12:07.758595 2195229 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:12:07.776783 2195229 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2170 bytes)
	I0916 11:12:07.795313 2195229 ssh_runner.go:195] Run: grep 192.168.58.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:12:07.798699 2195229 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.58.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:12:07.809679 2195229 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:12:07.893107 2195229 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:12:07.908656 2195229 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146 for IP: 192.168.58.2
	I0916 11:12:07.908678 2195229 certs.go:194] generating shared ca certs ...
	I0916 11:12:07.908694 2195229 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:12:07.908842 2195229 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 11:12:07.908889 2195229 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 11:12:07.908901 2195229 certs.go:256] generating profile certs ...
	I0916 11:12:07.908999 2195229 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key
	I0916 11:12:07.909053 2195229 certs.go:359] skipping valid signed profile cert regeneration for "minikube": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key.cd70a0e7
	I0916 11:12:07.909101 2195229 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key
	I0916 11:12:07.909118 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:12:07.909133 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:12:07.909149 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:12:07.909162 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:12:07.909177 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0916 11:12:07.909194 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0916 11:12:07.909210 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0916 11:12:07.909223 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0916 11:12:07.909277 2195229 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 11:12:07.909309 2195229 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 11:12:07.909318 2195229 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:12:07.909342 2195229 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 11:12:07.909373 2195229 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:12:07.909411 2195229 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 11:12:07.909458 2195229 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:12:07.909492 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 11:12:07.909504 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:12:07.909522 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 11:12:07.910069 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:12:07.939073 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 11:12:07.966838 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:12:07.999145 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 11:12:08.045787 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0916 11:12:08.089032 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0916 11:12:08.134377 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0916 11:12:08.175677 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0916 11:12:08.204071 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 11:12:08.244718 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:12:08.275363 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 11:12:08.308049 2195229 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0916 11:12:08.326436 2195229 ssh_runner.go:195] Run: openssl version
	I0916 11:12:08.332053 2195229 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:12:08.332525 2195229 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 11:12:08.342153 2195229 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 11:12:08.345731 2195229 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:12:08.345765 2195229 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:12:08.345817 2195229 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 11:12:08.352554 2195229 command_runner.go:130] > 51391683
	I0916 11:12:08.353020 2195229 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 11:12:08.362456 2195229 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 11:12:08.372108 2195229 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 11:12:08.375770 2195229 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:12:08.375887 2195229 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:12:08.375952 2195229 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 11:12:08.382453 2195229 command_runner.go:130] > 3ec20f2e
	I0916 11:12:08.382961 2195229 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:12:08.392527 2195229 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:12:08.402291 2195229 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:12:08.406120 2195229 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:12:08.406152 2195229 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:12:08.406211 2195229 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:12:08.413001 2195229 command_runner.go:130] > b5213941
	I0916 11:12:08.413486 2195229 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:12:08.422644 2195229 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:12:08.426113 2195229 command_runner.go:130] >   File: /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:12:08.426138 2195229 command_runner.go:130] >   Size: 1176      	Blocks: 8          IO Block: 4096   regular file
	I0916 11:12:08.426146 2195229 command_runner.go:130] > Device: 10301h/66305d	Inode: 1081533     Links: 1
	I0916 11:12:08.426153 2195229 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:12:08.426186 2195229 command_runner.go:130] > Access: 2024-09-16 11:09:51.203544044 +0000
	I0916 11:12:08.426195 2195229 command_runner.go:130] > Modify: 2024-09-16 11:07:17.780345562 +0000
	I0916 11:12:08.426201 2195229 command_runner.go:130] > Change: 2024-09-16 11:07:17.780345562 +0000
	I0916 11:12:08.426205 2195229 command_runner.go:130] >  Birth: 2024-09-16 11:07:17.780345562 +0000
	I0916 11:12:08.426309 2195229 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-etcd-client.crt -checkend 86400
	I0916 11:12:08.433243 2195229 command_runner.go:130] > Certificate will not expire
	I0916 11:12:08.433642 2195229 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/apiserver-kubelet-client.crt -checkend 86400
	I0916 11:12:08.440227 2195229 command_runner.go:130] > Certificate will not expire
	I0916 11:12:08.440618 2195229 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/server.crt -checkend 86400
	I0916 11:12:08.447642 2195229 command_runner.go:130] > Certificate will not expire
	I0916 11:12:08.447714 2195229 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/healthcheck-client.crt -checkend 86400
	I0916 11:12:08.454270 2195229 command_runner.go:130] > Certificate will not expire
	I0916 11:12:08.454736 2195229 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/etcd/peer.crt -checkend 86400
	I0916 11:12:08.461436 2195229 command_runner.go:130] > Certificate will not expire
	I0916 11:12:08.461831 2195229 ssh_runner.go:195] Run: openssl x509 -noout -in /var/lib/minikube/certs/front-proxy-client.crt -checkend 86400
	I0916 11:12:08.468677 2195229 command_runner.go:130] > Certificate will not expire
	I0916 11:12:08.469121 2195229 kubeadm.go:392] StartCluster: {Name:multinode-890146 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServe
rNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-instal
ler:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:12:08.469244 2195229 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0916 11:12:08.469315 2195229 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0916 11:12:08.503558 2195229 command_runner.go:130] > 07054c18240bddcff9366f96beae270b49083c8c0a3d760859239bbf844ea0cd
	I0916 11:12:08.503611 2195229 command_runner.go:130] > 0d3abd904fe54a1978d08d9978009d74bddb24839e5a5ca370830593e207f392
	I0916 11:12:08.503758 2195229 command_runner.go:130] > 92299bff0d2567667e4546c0c60418026422290d336e9613b5aed5df8af84cc5
	I0916 11:12:08.503902 2195229 command_runner.go:130] > 654ecbfed03d8e141ce6b52bf8bf1bd98bc2fcb02e1f9df6947b6c89d77bde4b
	I0916 11:12:08.504019 2195229 command_runner.go:130] > 07455bc60716ac512dd7e5994733e02cc35ce9f026df34c778b61b0551008067
	I0916 11:12:08.504173 2195229 command_runner.go:130] > 2cc43e414446d6a831068e3af8cf0e1b1501b91167342420a33bdc74bc31c020
	I0916 11:12:08.504224 2195229 command_runner.go:130] > 5973d4702c82301758aca3fa2a6a770d5ce1c6ff9abd4830207a977a63162fdc
	I0916 11:12:08.504379 2195229 command_runner.go:130] > 9a6e3be38656a34c99c98c1d83ac245ced91c2c4e06160058130d7bdf77a6cb2
	I0916 11:12:08.511148 2195229 cri.go:89] found id: "07054c18240bddcff9366f96beae270b49083c8c0a3d760859239bbf844ea0cd"
	I0916 11:12:08.511170 2195229 cri.go:89] found id: "0d3abd904fe54a1978d08d9978009d74bddb24839e5a5ca370830593e207f392"
	I0916 11:12:08.511175 2195229 cri.go:89] found id: "92299bff0d2567667e4546c0c60418026422290d336e9613b5aed5df8af84cc5"
	I0916 11:12:08.511179 2195229 cri.go:89] found id: "654ecbfed03d8e141ce6b52bf8bf1bd98bc2fcb02e1f9df6947b6c89d77bde4b"
	I0916 11:12:08.511182 2195229 cri.go:89] found id: "07455bc60716ac512dd7e5994733e02cc35ce9f026df34c778b61b0551008067"
	I0916 11:12:08.511186 2195229 cri.go:89] found id: "2cc43e414446d6a831068e3af8cf0e1b1501b91167342420a33bdc74bc31c020"
	I0916 11:12:08.511189 2195229 cri.go:89] found id: "5973d4702c82301758aca3fa2a6a770d5ce1c6ff9abd4830207a977a63162fdc"
	I0916 11:12:08.511192 2195229 cri.go:89] found id: "9a6e3be38656a34c99c98c1d83ac245ced91c2c4e06160058130d7bdf77a6cb2"
	I0916 11:12:08.511195 2195229 cri.go:89] found id: ""
	I0916 11:12:08.511246 2195229 ssh_runner.go:195] Run: sudo runc --root /run/containerd/runc/k8s.io list -f json
	I0916 11:12:08.526618 2195229 command_runner.go:130] > null
	I0916 11:12:08.528514 2195229 cri.go:116] JSON = null
	W0916 11:12:08.528570 2195229 kubeadm.go:399] unpause failed: list paused: list returned 0 containers, but ps returned 8
	I0916 11:12:08.528654 2195229 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0916 11:12:08.536635 2195229 command_runner.go:130] > /var/lib/kubelet/config.yaml
	I0916 11:12:08.536716 2195229 command_runner.go:130] > /var/lib/kubelet/kubeadm-flags.env
	I0916 11:12:08.536737 2195229 command_runner.go:130] > /var/lib/minikube/etcd:
	I0916 11:12:08.536754 2195229 command_runner.go:130] > member
	I0916 11:12:08.538007 2195229 kubeadm.go:408] found existing configuration files, will attempt cluster restart
	I0916 11:12:08.538063 2195229 kubeadm.go:593] restartPrimaryControlPlane start ...
	I0916 11:12:08.538146 2195229 ssh_runner.go:195] Run: sudo test -d /data/minikube
	I0916 11:12:08.547552 2195229 kubeadm.go:130] /data/minikube skipping compat symlinks: sudo test -d /data/minikube: Process exited with status 1
	stdout:
	
	stderr:
	I0916 11:12:08.548092 2195229 kubeconfig.go:47] verify endpoint returned: get endpoint: "multinode-890146" does not appear in /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:12:08.548257 2195229 kubeconfig.go:62] /home/jenkins/minikube-integration/19651-2057935/kubeconfig needs updating (will repair): [kubeconfig missing "multinode-890146" cluster setting kubeconfig missing "multinode-890146" context setting]
	I0916 11:12:08.548590 2195229 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:12:08.549089 2195229 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:12:08.549415 2195229 kapi.go:59] client config for multinode-890146: &rest.Config{Host:"https://192.168.58.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:12:08.550065 2195229 cert_rotation.go:140] Starting client certificate rotation controller
	I0916 11:12:08.550389 2195229 ssh_runner.go:195] Run: sudo diff -u /var/tmp/minikube/kubeadm.yaml /var/tmp/minikube/kubeadm.yaml.new
	I0916 11:12:08.563518 2195229 kubeadm.go:630] The running cluster does not require reconfiguration: 192.168.58.2
	I0916 11:12:08.563596 2195229 kubeadm.go:597] duration metric: took 25.514347ms to restartPrimaryControlPlane
	I0916 11:12:08.563620 2195229 kubeadm.go:394] duration metric: took 94.50684ms to StartCluster
	I0916 11:12:08.563662 2195229 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:12:08.563750 2195229 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:12:08.564498 2195229 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:12:08.564760 2195229 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.58.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 11:12:08.565205 2195229 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:12:08.565634 2195229 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:12:08.569788 2195229 out.go:177] * Enabled addons: 
	I0916 11:12:08.569908 2195229 out.go:177] * Verifying Kubernetes components...
	I0916 11:12:08.571977 2195229 addons.go:510] duration metric: took 6.76697ms for enable addons: enabled=[]
	I0916 11:12:08.573221 2195229 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:12:08.729195 2195229 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:12:08.748435 2195229 node_ready.go:35] waiting up to 6m0s for node "multinode-890146" to be "Ready" ...
	I0916 11:12:08.748626 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:08.748653 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:08.748695 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:08.748729 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:08.748990 2195229 round_trippers.go:574] Response Status:  in 0 milliseconds
	I0916 11:12:08.749045 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:09.248744 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:09.248820 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:09.248858 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:09.248879 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.111934 2195229 round_trippers.go:574] Response Status: 200 OK in 3863 milliseconds
	I0916 11:12:13.111959 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.111968 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:12:13.111973 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:12:13.111978 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.111981 2195229 round_trippers.go:580]     Audit-Id: 573506aa-a4e3-484d-8c87-f41862ab50d5
	I0916 11:12:13.111984 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.111987 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.112841 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:13.113639 2195229 node_ready.go:49] node "multinode-890146" has status "Ready":"True"
	I0916 11:12:13.113665 2195229 node_ready.go:38] duration metric: took 4.365148233s for node "multinode-890146" to be "Ready" ...
	I0916 11:12:13.113676 2195229 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:12:13.113729 2195229 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 11:12:13.113745 2195229 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 11:12:13.113807 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:12:13.113817 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.113825 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.113829 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.146356 2195229 round_trippers.go:574] Response Status: 200 OK in 32 milliseconds
	I0916 11:12:13.146380 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.146389 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:12:13.146399 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:12:13.146403 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.146406 2195229 round_trippers.go:580]     Audit-Id: 15f737f3-024c-4ffb-8487-0901f7dacd96
	I0916 11:12:13.146409 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.146412 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.155081 2195229 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"974"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"792","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f
:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{ [truncated 90726 chars]
	I0916 11:12:13.160692 2195229 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:13.160798 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:13.160810 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.160821 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.160830 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.164113 2195229 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:12:13.164138 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.164148 2195229 round_trippers.go:580]     Audit-Id: 01bca439-a78b-439d-a1a0-c9e790768187
	I0916 11:12:13.164153 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.164156 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.164159 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:12:13.164161 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:12:13.164164 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.164686 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"792","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":{
}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f:i [truncated 6693 chars]
	I0916 11:12:13.165258 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:13.165279 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.165289 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.165293 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.172341 2195229 round_trippers.go:574] Response Status: 200 OK in 7 milliseconds
	I0916 11:12:13.172368 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.172377 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:12:13.172381 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.172384 2195229 round_trippers.go:580]     Audit-Id: 6becd424-599a-4ed6-93f1-d5730681e771
	I0916 11:12:13.172388 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.172391 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.172394 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:12:13.172636 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:13.173020 2195229 pod_ready.go:93] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:13.173041 2195229 pod_ready.go:82] duration metric: took 12.312524ms for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:13.173052 2195229 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:13.173119 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-890146
	I0916 11:12:13.173129 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.173137 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.173142 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.179849 2195229 round_trippers.go:574] Response Status: 200 OK in 6 milliseconds
	I0916 11:12:13.179915 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.179939 2195229 round_trippers.go:580]     Audit-Id: 708e82c4-9350-4fcc-960e-9659f001ecfa
	I0916 11:12:13.179958 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.180103 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.180121 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:12:13.180136 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:12:13.180152 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.180304 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-890146","namespace":"kube-system","uid":"59c960ab-f6dd-4ed3-856c-ba2a02295b12","resourceVersion":"781","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.58.2:2379","kubernetes.io/config.hash":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.mirror":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.seen":"2024-09-16T11:07:32.783608135Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-cl
ient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config. [truncated 6653 chars]
	I0916 11:12:13.180832 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:13.180877 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.180900 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.180918 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.185034 2195229 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:12:13.185094 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.185117 2195229 round_trippers.go:580]     Audit-Id: 25904cce-d47f-4ad9-8395-82cf812a6238
	I0916 11:12:13.185137 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.185155 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.185169 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:12:13.185192 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:12:13.185209 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.185678 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:13.186155 2195229 pod_ready.go:93] pod "etcd-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:13.186183 2195229 pod_ready.go:82] duration metric: took 13.122489ms for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:13.186216 2195229 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:13.186302 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-890146
	I0916 11:12:13.186320 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.186342 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.186356 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.189678 2195229 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:12:13.189730 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.189752 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:12:13.189771 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.189788 2195229 round_trippers.go:580]     Audit-Id: 34de95a8-afea-4a48-8a91-0c4f6f27fc98
	I0916 11:12:13.189801 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.189817 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.189834 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:12:13.190287 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-890146","namespace":"kube-system","uid":"9846229d-7227-453f-8c30-697aaba61648","resourceVersion":"771","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.58.2:8443","kubernetes.io/config.hash":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.mirror":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.seen":"2024-09-16T11:07:32.783610957Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kube
rnetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes.i [truncated 8731 chars]
	I0916 11:12:13.190972 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:13.190993 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.191018 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.191028 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.192878 2195229 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:12:13.192924 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.192946 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.192964 2195229 round_trippers.go:580]     Audit-Id: 5a7111ad-1630-4115-b2f9-72d19ceda2e2
	I0916 11:12:13.192982 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.193002 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.193018 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:12:13.193033 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:12:13.193419 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:13.193876 2195229 pod_ready.go:93] pod "kube-apiserver-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:13.193903 2195229 pod_ready.go:82] duration metric: took 7.672409ms for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:13.193927 2195229 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:13.194018 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-890146
	I0916 11:12:13.194030 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.194055 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.194066 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.196083 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:13.196133 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.196156 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.196172 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.196188 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:12:13.196213 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:12:13.196232 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.196248 2195229 round_trippers.go:580]     Audit-Id: 3a2e2028-7a15-4299-9880-e418ae84b05a
	I0916 11:12:13.196733 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-890146","namespace":"kube-system","uid":"421b15a5-a8e2-4631-bf18-1592c8747b09","resourceVersion":"773","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.mirror":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.seen":"2024-09-16T11:07:32.783594137Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.i
o/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".":{ [truncated 8306 chars]
	I0916 11:12:13.197362 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:13.197380 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.197404 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.197415 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.199364 2195229 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:12:13.199415 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.199437 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:12:13.199452 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:12:13.199468 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.199486 2195229 round_trippers.go:580]     Audit-Id: c4fb0c61-e592-450e-9c79-b1b0890e9328
	I0916 11:12:13.199503 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.199519 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.199780 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"691","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:13.200222 2195229 pod_ready.go:93] pod "kube-controller-manager-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:13.200241 2195229 pod_ready.go:82] duration metric: took 6.300028ms for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:13.200264 2195229 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:13.200349 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:12:13.200368 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.200390 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.200395 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.204586 2195229 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:12:13.204660 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.204681 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.204706 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.204724 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 
	I0916 11:12:13.204740 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 
	I0916 11:12:13.204753 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.204769 2195229 round_trippers.go:580]     Audit-Id: eb7b1d1c-7cc3-44fc-aa21-1538a7028c89
	I0916 11:12:13.204983 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"885","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6396 chars]
	I0916 11:12:13.314828 2195229 request.go:632] Waited for 109.246263ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:12:13.314948 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:12:13.314959 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.314969 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.314976 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.317277 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:13.317302 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.317311 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:13.317314 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:13.317319 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.317322 2195229 round_trippers.go:580]     Audit-Id: 9f13e925-c9d3-410f-b04e-64dc0cb2b396
	I0916 11:12:13.317324 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.317327 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.317775 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:12:13.318213 2195229 pod_ready.go:93] pod "kube-proxy-59f9h" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:13.318234 2195229 pod_ready.go:82] duration metric: took 117.940082ms for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:13.318265 2195229 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:13.514672 2195229 request.go:632] Waited for 196.325656ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:12:13.514768 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:12:13.514788 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.514797 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.514803 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.517472 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:13.517592 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.517633 2195229 round_trippers.go:580]     Audit-Id: f9ca743b-0a94-40ec-b681-fc6744e270b8
	I0916 11:12:13.517646 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.517650 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.517652 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:13.517655 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:13.517658 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.517791 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-fm5qr","generateName":"kube-proxy-","namespace":"kube-system","uid":"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73","resourceVersion":"759","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6388 chars]
	I0916 11:12:13.714863 2195229 request.go:632] Waited for 196.453803ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:13.714935 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:13.714951 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.714961 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.714973 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.717279 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:13.717345 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.717368 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.717388 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.717415 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:13.717436 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:13.717453 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.717469 2195229 round_trippers.go:580]     Audit-Id: 0b40c2dc-8b3b-4701-90bd-588c60c32166
	I0916 11:12:13.717719 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:13.718128 2195229 pod_ready.go:93] pod "kube-proxy-fm5qr" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:13.718146 2195229 pod_ready.go:82] duration metric: took 399.873418ms for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:13.718158 2195229 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-vl27g" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:13.913919 2195229 request.go:632] Waited for 195.694611ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:12:13.913994 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:12:13.914002 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:13.914011 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:13.914026 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:13.916660 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:13.916701 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:13.916711 2195229 round_trippers.go:580]     Audit-Id: 1ace47a6-b230-4aa3-83e6-ef37e78122ed
	I0916 11:12:13.916714 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:13.916718 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:13.916720 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:13.916723 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:13.916726 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:13 GMT
	I0916 11:12:13.916873 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"945","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6396 chars]
	I0916 11:12:14.114574 2195229 request.go:632] Waited for 197.161778ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:12:14.114760 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:12:14.114771 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:14.114789 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:14.114793 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:14.119064 2195229 round_trippers.go:574] Response Status: 404 Not Found in 4 milliseconds
	I0916 11:12:14.119086 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:14.119095 2195229 round_trippers.go:580]     Audit-Id: 1c014d16-fff9-48d7-ad5a-feaf9c366b3e
	I0916 11:12:14.119100 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:14.119104 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:14.119107 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:14.119111 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:14.119114 2195229 round_trippers.go:580]     Content-Length: 210
	I0916 11:12:14.119116 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:14 GMT
	I0916 11:12:14.119297 2195229 request.go:1351] Response Body: {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"nodes \"multinode-890146-m03\" not found","reason":"NotFound","details":{"name":"multinode-890146-m03","kind":"nodes"},"code":404}
	I0916 11:12:14.119583 2195229 pod_ready.go:98] node "multinode-890146-m03" hosting pod "kube-proxy-vl27g" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "multinode-890146-m03": nodes "multinode-890146-m03" not found
	I0916 11:12:14.119622 2195229 pod_ready.go:82] duration metric: took 401.456007ms for pod "kube-proxy-vl27g" in "kube-system" namespace to be "Ready" ...
	E0916 11:12:14.119654 2195229 pod_ready.go:67] WaitExtra: waitPodCondition: node "multinode-890146-m03" hosting pod "kube-proxy-vl27g" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "multinode-890146-m03": nodes "multinode-890146-m03" not found
	I0916 11:12:14.119677 2195229 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:14.314633 2195229 request.go:632] Waited for 194.857792ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:14.314771 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:14.314835 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:14.314858 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:14.314878 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:14.318922 2195229 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:12:14.318947 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:14.318956 2195229 round_trippers.go:580]     Audit-Id: fd094ba9-eac4-4eb0-ac71-77b79de5e556
	I0916 11:12:14.318961 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:14.318963 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:14.318988 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:14.318993 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:14.318996 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:14 GMT
	I0916 11:12:14.319365 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"985","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5432 chars]
	I0916 11:12:14.514081 2195229 request.go:632] Waited for 194.152031ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:14.514195 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:14.514220 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:14.514261 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:14.514279 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:14.519181 2195229 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:12:14.519258 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:14.519284 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:14.519332 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:14.519357 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:14 GMT
	I0916 11:12:14.519375 2195229 round_trippers.go:580]     Audit-Id: e96598ad-48d8-434a-b36b-f572f95adf25
	I0916 11:12:14.519407 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:14.519428 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:14.519569 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:14.714846 2195229 request.go:632] Waited for 94.161692ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:14.714963 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:14.715015 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:14.715044 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:14.715064 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:14.717575 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:14.717675 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:14.717699 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:14.717727 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:14.717754 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:14.717771 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:14 GMT
	I0916 11:12:14.717788 2195229 round_trippers.go:580]     Audit-Id: dbec6bf1-91b4-4a21-887e-fa63242d71df
	I0916 11:12:14.717811 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:14.717958 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"985","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5432 chars]
	I0916 11:12:14.914811 2195229 request.go:632] Waited for 196.310682ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:14.914949 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:14.914969 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:14.915007 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:14.915027 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:14.917538 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:14.917610 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:14.917633 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:14.917652 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:14.917668 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:14.917693 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:14.917716 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:14 GMT
	I0916 11:12:14.917734 2195229 round_trippers.go:580]     Audit-Id: 4c106e6b-ad81-48d2-b2c1-63f8c92c0c8d
	I0916 11:12:14.917962 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:15.143960 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:15.144043 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:15.144067 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:15.144087 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:15.148147 2195229 round_trippers.go:574] Response Status: 200 OK in 4 milliseconds
	I0916 11:12:15.148268 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:15.148294 2195229 round_trippers.go:580]     Audit-Id: cfae6d36-55cd-43aa-a059-cba9a41d00e5
	I0916 11:12:15.148322 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:15.148349 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:15.148367 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:15.148396 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:15.148413 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:15 GMT
	I0916 11:12:15.148557 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"985","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5432 chars]
	I0916 11:12:15.314313 2195229 request.go:632] Waited for 165.203807ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:15.314422 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:15.314444 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:15.314488 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:15.314507 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:15.318132 2195229 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:12:15.318203 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:15.318225 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:15.318244 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:15.318259 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:15 GMT
	I0916 11:12:15.318285 2195229 round_trippers.go:580]     Audit-Id: 8438fed4-733e-4581-bf36-449e11875363
	I0916 11:12:15.318307 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:15.318324 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:15.318940 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:15.620258 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:15.620333 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:15.620365 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:15.620387 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:15.623146 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:15.623214 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:15.623237 2195229 round_trippers.go:580]     Audit-Id: 80c5a5ad-29e1-4e2a-94f3-e79fab897920
	I0916 11:12:15.623257 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:15.623282 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:15.623307 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:15.623324 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:15.623342 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:15 GMT
	I0916 11:12:15.623929 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"985","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5432 chars]
	I0916 11:12:15.714754 2195229 request.go:632] Waited for 90.258846ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:15.714857 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:15.714871 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:15.714881 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:15.714887 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:15.717072 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:15.717107 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:15.717116 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:15.717120 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:15.717125 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:15.717128 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:15 GMT
	I0916 11:12:15.717131 2195229 round_trippers.go:580]     Audit-Id: 213b95a4-fbf5-4178-ab29-0853dbdfb82c
	I0916 11:12:15.717134 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:15.717341 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:16.120800 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:16.120827 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:16.120836 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:16.120840 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:16.123344 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:16.123372 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:16.123380 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:16 GMT
	I0916 11:12:16.123386 2195229 round_trippers.go:580]     Audit-Id: e3dc4129-fd31-4040-8a84-d728952c89b2
	I0916 11:12:16.123417 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:16.123428 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:16.123430 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:16.123433 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:16.123725 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"985","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5432 chars]
	I0916 11:12:16.124227 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:16.124244 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:16.124251 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:16.124255 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:16.126385 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:16.126464 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:16.126496 2195229 round_trippers.go:580]     Audit-Id: 1aa68b51-890b-4c61-9d7c-70f42c93565c
	I0916 11:12:16.126546 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:16.126573 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:16.126582 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:16.126586 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:16.126589 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:16 GMT
	I0916 11:12:16.126821 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:16.127325 2195229 pod_ready.go:103] pod "kube-scheduler-multinode-890146" in "kube-system" namespace has status "Ready":"False"
	I0916 11:12:16.620710 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:16.620735 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:16.620745 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:16.620751 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:16.622834 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:16.622870 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:16.622878 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:16.622884 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:16.622887 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:16.622889 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:16 GMT
	I0916 11:12:16.622892 2195229 round_trippers.go:580]     Audit-Id: d968e0f9-db8f-4d66-aea8-2e93f6e56546
	I0916 11:12:16.622896 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:16.623131 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"985","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5432 chars]
	I0916 11:12:16.623587 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:16.623609 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:16.623617 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:16.623622 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:16.625700 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:16.625724 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:16.625733 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:16.625736 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:16.625740 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:16.625744 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:16 GMT
	I0916 11:12:16.625747 2195229 round_trippers.go:580]     Audit-Id: 1ef084af-aa47-4bfe-8542-bd2781f16739
	I0916 11:12:16.625749 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:16.625978 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:17.120698 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:17.120727 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:17.120737 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:17.120742 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:17.123154 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:17.123179 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:17.123188 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:17.123193 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:17 GMT
	I0916 11:12:17.123197 2195229 round_trippers.go:580]     Audit-Id: 6f857a8b-93d0-4266-ba37-17da4c9f7c72
	I0916 11:12:17.123201 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:17.123204 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:17.123207 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:17.123482 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"985","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5432 chars]
	I0916 11:12:17.123948 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:17.123964 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:17.123972 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:17.123984 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:17.126250 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:17.126273 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:17.126282 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:17.126287 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:17.126293 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:17.126296 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:17.126300 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:17 GMT
	I0916 11:12:17.126302 2195229 round_trippers.go:580]     Audit-Id: 52460235-2dcd-47cf-a9f6-e58c15bbabde
	I0916 11:12:17.126661 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:17.619989 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:17.620019 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:17.620034 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:17.620041 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:17.622453 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:17.622476 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:17.622485 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:17.622489 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:17.622493 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:17.622496 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:17.622499 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:17 GMT
	I0916 11:12:17.622502 2195229 round_trippers.go:580]     Audit-Id: de662999-fef1-41de-a503-e1c19fe205d0
	I0916 11:12:17.622769 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"985","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5432 chars]
	I0916 11:12:17.623243 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:17.623254 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:17.623263 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:17.623267 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:17.625289 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:17.625354 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:17.625398 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:17.625440 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:17.625458 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:17.625474 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:17 GMT
	I0916 11:12:17.625504 2195229 round_trippers.go:580]     Audit-Id: fbedf2a4-89cb-4232-97d4-e8e8907b43dc
	I0916 11:12:17.625524 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:17.625670 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:18.119994 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:18.120024 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:18.120031 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:18.120035 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:18.122387 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:18.122422 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:18.122431 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:18.122463 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:18.122467 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:18.122470 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:18 GMT
	I0916 11:12:18.122481 2195229 round_trippers.go:580]     Audit-Id: 5eee9574-4d43-4661-969f-ea9b5d285da5
	I0916 11:12:18.122483 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:18.122658 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"985","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5432 chars]
	I0916 11:12:18.123239 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:18.123259 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:18.123269 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:18.123274 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:18.125323 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:18.125341 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:18.125349 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:18 GMT
	I0916 11:12:18.125353 2195229 round_trippers.go:580]     Audit-Id: cded7d06-fc11-41a4-b5c0-34202484effe
	I0916 11:12:18.125356 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:18.125358 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:18.125361 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:18.125364 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:18.125494 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:18.620757 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:18.620785 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:18.620796 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:18.620800 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:18.623208 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:18.623271 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:18.623295 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:18.623355 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:18.623380 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:18.623399 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:18.623410 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:18 GMT
	I0916 11:12:18.623415 2195229 round_trippers.go:580]     Audit-Id: e45cbc6d-5599-43b4-86b3-25788c5ededb
	I0916 11:12:18.623545 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"985","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5432 chars]
	I0916 11:12:18.624040 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:18.624055 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:18.624063 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:18.624068 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:18.626340 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:18.626363 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:18.626372 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:18.626377 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:18.626382 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:18.626384 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:18.626387 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:18 GMT
	I0916 11:12:18.626390 2195229 round_trippers.go:580]     Audit-Id: 6583a92f-3f96-427b-89ff-3066c62860ac
	I0916 11:12:18.626578 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:18.627010 2195229 pod_ready.go:103] pod "kube-scheduler-multinode-890146" in "kube-system" namespace has status "Ready":"False"
	I0916 11:12:19.120797 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:19.120822 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:19.120832 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:19.120837 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:19.123301 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:19.123378 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:19.123401 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:19.123420 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:19.123432 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:19 GMT
	I0916 11:12:19.123452 2195229 round_trippers.go:580]     Audit-Id: 2bf38f47-e428-42f8-8a6f-98b85b04782c
	I0916 11:12:19.123455 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:19.123476 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:19.123636 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"985","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{},
"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{} [truncated 5432 chars]
	I0916 11:12:19.124143 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:19.124162 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:19.124171 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:19.124177 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:19.126323 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:19.126348 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:19.126357 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:19.126362 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:19.126365 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:19.126369 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:19 GMT
	I0916 11:12:19.126372 2195229 round_trippers.go:580]     Audit-Id: 69ef95fa-6e97-4565-a7c2-986f013c189a
	I0916 11:12:19.126376 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:19.126615 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:19.620852 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:19.620877 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:19.620887 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:19.620891 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:19.623415 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:19.623443 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:19.623451 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:19.623455 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:19.623459 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:19.623462 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:19.623464 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:19 GMT
	I0916 11:12:19.623466 2195229 round_trippers.go:580]     Audit-Id: 17ceeb5d-e5c9-4a3c-bd3e-526739e1d125
	I0916 11:12:19.623632 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"1068","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{}
,"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{ [truncated 5189 chars]
	I0916 11:12:19.624127 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:19.624137 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:19.624145 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:19.624150 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:19.626425 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:19.626447 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:19.626499 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:19 GMT
	I0916 11:12:19.626511 2195229 round_trippers.go:580]     Audit-Id: 74673fcd-fa5f-488c-9c73-8881d2b58f15
	I0916 11:12:19.626516 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:19.626519 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:19.626523 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:19.626533 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:19.626946 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:19.627336 2195229 pod_ready.go:93] pod "kube-scheduler-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:19.627362 2195229 pod_ready.go:82] duration metric: took 5.507639592s for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:19.627378 2195229 pod_ready.go:39] duration metric: took 6.513690638s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:12:19.627397 2195229 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:12:19.627461 2195229 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:12:19.637567 2195229 command_runner.go:130] > 1109
	I0916 11:12:19.638832 2195229 api_server.go:72] duration metric: took 11.074018598s to wait for apiserver process to appear ...
	I0916 11:12:19.638852 2195229 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:12:19.638872 2195229 api_server.go:253] Checking apiserver healthz at https://192.168.58.2:8443/healthz ...
	I0916 11:12:19.647622 2195229 api_server.go:279] https://192.168.58.2:8443/healthz returned 200:
	ok
	I0916 11:12:19.647702 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/version
	I0916 11:12:19.647714 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:19.647725 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:19.647733 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:19.648721 2195229 round_trippers.go:574] Response Status: 200 OK in 0 milliseconds
	I0916 11:12:19.648749 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:19.648757 2195229 round_trippers.go:580]     Content-Length: 263
	I0916 11:12:19.648763 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:19 GMT
	I0916 11:12:19.648766 2195229 round_trippers.go:580]     Audit-Id: bf3cb542-45ca-4dca-8bc7-1b39abe72407
	I0916 11:12:19.648770 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:19.648774 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:19.648779 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:19.648787 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:19.648803 2195229 request.go:1351] Response Body: {
	  "major": "1",
	  "minor": "31",
	  "gitVersion": "v1.31.1",
	  "gitCommit": "948afe5ca072329a73c8e79ed5938717a5cb3d21",
	  "gitTreeState": "clean",
	  "buildDate": "2024-09-11T21:22:08Z",
	  "goVersion": "go1.22.6",
	  "compiler": "gc",
	  "platform": "linux/arm64"
	}
	I0916 11:12:19.648906 2195229 api_server.go:141] control plane version: v1.31.1
	I0916 11:12:19.648925 2195229 api_server.go:131] duration metric: took 10.066513ms to wait for apiserver health ...
	I0916 11:12:19.648933 2195229 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:12:19.648988 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:12:19.649001 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:19.649008 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:19.649013 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:19.652056 2195229 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:12:19.652077 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:19.652085 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:19.652090 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:19.652093 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:19.652095 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:19 GMT
	I0916 11:12:19.652113 2195229 round_trippers.go:580]     Audit-Id: 34b55a77-3f35-422f-9bd9-ea476a2f3213
	I0916 11:12:19.652119 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:19.653206 2195229 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"1068"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},
"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers" [truncated 91434 chars]
	I0916 11:12:19.659483 2195229 system_pods.go:59] 12 kube-system pods found
	I0916 11:12:19.659532 2195229 system_pods.go:61] "coredns-7c65d6cfc9-vp22b" [a6adb735-448b-480b-aba1-3ce4d56c6fc7] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 11:12:19.659542 2195229 system_pods.go:61] "etcd-multinode-890146" [59c960ab-f6dd-4ed3-856c-ba2a02295b12] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 11:12:19.659569 2195229 system_pods.go:61] "kindnet-4sjj6" [0ee7a4e2-91b7-4249-9e02-04f1ae301cce] Running
	I0916 11:12:19.659584 2195229 system_pods.go:61] "kindnet-dbrhk" [24ef6be7-a1ab-41f7-83c8-aa5af5007281] Running
	I0916 11:12:19.659589 2195229 system_pods.go:61] "kindnet-ndgrk" [05cf469d-f130-4d9e-9540-6c8ae5be1e57] Running
	I0916 11:12:19.659596 2195229 system_pods.go:61] "kube-apiserver-multinode-890146" [9846229d-7227-453f-8c30-697aaba61648] Running / Ready:ContainersNotReady (containers with unready status: [kube-apiserver]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-apiserver])
	I0916 11:12:19.659604 2195229 system_pods.go:61] "kube-controller-manager-multinode-890146" [421b15a5-a8e2-4631-bf18-1592c8747b09] Running
	I0916 11:12:19.659609 2195229 system_pods.go:61] "kube-proxy-59f9h" [a9a614fd-3de3-4fa0-b773-5d6a6054d0ea] Running
	I0916 11:12:19.659612 2195229 system_pods.go:61] "kube-proxy-fm5qr" [8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73] Running
	I0916 11:12:19.659616 2195229 system_pods.go:61] "kube-proxy-vl27g" [8f7e9a8c-6e70-4445-b85e-11c5c03701be] Running
	I0916 11:12:19.659620 2195229 system_pods.go:61] "kube-scheduler-multinode-890146" [6d440ef3-2e6e-4db9-8c28-2417f7a80c9f] Running
	I0916 11:12:19.659624 2195229 system_pods.go:61] "storage-provisioner" [97795413-5c7a-480b-9cbd-18d4dea5669b] Running
	I0916 11:12:19.659637 2195229 system_pods.go:74] duration metric: took 10.691076ms to wait for pod list to return data ...
	I0916 11:12:19.659647 2195229 default_sa.go:34] waiting for default service account to be created ...
	I0916 11:12:19.659733 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/default/serviceaccounts
	I0916 11:12:19.659743 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:19.659751 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:19.659758 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:19.662423 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:19.662441 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:19.662449 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:19.662454 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:19.662457 2195229 round_trippers.go:580]     Content-Length: 262
	I0916 11:12:19.662460 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:19 GMT
	I0916 11:12:19.662462 2195229 round_trippers.go:580]     Audit-Id: eebd700e-2ae0-4b6d-97e3-409e998a0193
	I0916 11:12:19.662465 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:19.662467 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:19.662487 2195229 request.go:1351] Response Body: {"kind":"ServiceAccountList","apiVersion":"v1","metadata":{"resourceVersion":"1068"},"items":[{"metadata":{"name":"default","namespace":"default","uid":"05c19a8a-7c83-4ce8-b18d-3bc9431ca644","resourceVersion":"353","creationTimestamp":"2024-09-16T11:07:37Z"}}]}
	I0916 11:12:19.662642 2195229 default_sa.go:45] found service account: "default"
	I0916 11:12:19.662653 2195229 default_sa.go:55] duration metric: took 2.999844ms for default service account to be created ...
	I0916 11:12:19.662661 2195229 system_pods.go:116] waiting for k8s-apps to be running ...
	I0916 11:12:19.662779 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:12:19.662786 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:19.662793 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:19.662799 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:19.665725 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:19.665748 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:19.665756 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:19.665759 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:19 GMT
	I0916 11:12:19.665762 2195229 round_trippers.go:580]     Audit-Id: ed06bdd8-c99e-4817-ba24-59dd232b7c68
	I0916 11:12:19.665765 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:19.665768 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:19.665777 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:19.666403 2195229 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"1068"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},
"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers" [truncated 91434 chars]
	I0916 11:12:19.670357 2195229 system_pods.go:86] 12 kube-system pods found
	I0916 11:12:19.670401 2195229 system_pods.go:89] "coredns-7c65d6cfc9-vp22b" [a6adb735-448b-480b-aba1-3ce4d56c6fc7] Running / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0916 11:12:19.670411 2195229 system_pods.go:89] "etcd-multinode-890146" [59c960ab-f6dd-4ed3-856c-ba2a02295b12] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 11:12:19.670422 2195229 system_pods.go:89] "kindnet-4sjj6" [0ee7a4e2-91b7-4249-9e02-04f1ae301cce] Running
	I0916 11:12:19.670427 2195229 system_pods.go:89] "kindnet-dbrhk" [24ef6be7-a1ab-41f7-83c8-aa5af5007281] Running
	I0916 11:12:19.670432 2195229 system_pods.go:89] "kindnet-ndgrk" [05cf469d-f130-4d9e-9540-6c8ae5be1e57] Running
	I0916 11:12:19.670442 2195229 system_pods.go:89] "kube-apiserver-multinode-890146" [9846229d-7227-453f-8c30-697aaba61648] Running / Ready:ContainersNotReady (containers with unready status: [kube-apiserver]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-apiserver])
	I0916 11:12:19.670447 2195229 system_pods.go:89] "kube-controller-manager-multinode-890146" [421b15a5-a8e2-4631-bf18-1592c8747b09] Running
	I0916 11:12:19.670461 2195229 system_pods.go:89] "kube-proxy-59f9h" [a9a614fd-3de3-4fa0-b773-5d6a6054d0ea] Running
	I0916 11:12:19.670466 2195229 system_pods.go:89] "kube-proxy-fm5qr" [8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73] Running
	I0916 11:12:19.670470 2195229 system_pods.go:89] "kube-proxy-vl27g" [8f7e9a8c-6e70-4445-b85e-11c5c03701be] Running
	I0916 11:12:19.670476 2195229 system_pods.go:89] "kube-scheduler-multinode-890146" [6d440ef3-2e6e-4db9-8c28-2417f7a80c9f] Running
	I0916 11:12:19.670480 2195229 system_pods.go:89] "storage-provisioner" [97795413-5c7a-480b-9cbd-18d4dea5669b] Running
	I0916 11:12:19.670493 2195229 system_pods.go:126] duration metric: took 7.827518ms to wait for k8s-apps to be running ...
	I0916 11:12:19.670507 2195229 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:12:19.670572 2195229 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:12:19.682864 2195229 system_svc.go:56] duration metric: took 12.346878ms WaitForService to wait for kubelet
	I0916 11:12:19.682950 2195229 kubeadm.go:582] duration metric: took 11.118138545s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:12:19.682986 2195229 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:12:19.714345 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes
	I0916 11:12:19.714370 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:19.714379 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:19.714385 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:19.717079 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:19.717150 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:19.717172 2195229 round_trippers.go:580]     Audit-Id: a9db0153-baaf-4823-b720-438b849e6f55
	I0916 11:12:19.717190 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:19.717203 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:19.717233 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:19.717252 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:19.717268 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:19 GMT
	I0916 11:12:19.717527 2195229 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"1068"},"items":[{"metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"mana
gedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1" [truncated 11675 chars]
	I0916 11:12:19.718268 2195229 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:12:19.718294 2195229 node_conditions.go:123] node cpu capacity is 2
	I0916 11:12:19.718309 2195229 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:12:19.718321 2195229 node_conditions.go:123] node cpu capacity is 2
	I0916 11:12:19.718327 2195229 node_conditions.go:105] duration metric: took 35.311438ms to run NodePressure ...
	I0916 11:12:19.718345 2195229 start.go:241] waiting for startup goroutines ...
	I0916 11:12:19.718353 2195229 start.go:246] waiting for cluster config update ...
	I0916 11:12:19.718364 2195229 start.go:255] writing updated cluster config ...
	I0916 11:12:19.721226 2195229 out.go:201] 
	I0916 11:12:19.723300 2195229 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:12:19.723459 2195229 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:12:19.725742 2195229 out.go:177] * Starting "multinode-890146-m02" worker node in "multinode-890146" cluster
	I0916 11:12:19.727582 2195229 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 11:12:19.729641 2195229 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:12:19.731358 2195229 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:12:19.731386 2195229 cache.go:56] Caching tarball of preloaded images
	I0916 11:12:19.731425 2195229 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:12:19.731488 2195229 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 11:12:19.731504 2195229 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 11:12:19.731631 2195229 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	W0916 11:12:19.758718 2195229 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:12:19.758739 2195229 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:12:19.758819 2195229 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:12:19.758837 2195229 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:12:19.758841 2195229 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:12:19.758849 2195229 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:12:19.758854 2195229 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:12:19.760261 2195229 image.go:273] response: 
	I0916 11:12:19.939060 2195229 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:12:19.939104 2195229 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:12:19.939133 2195229 start.go:360] acquireMachinesLock for multinode-890146-m02: {Name:mkb193e5e8454b4e97e0a3d9e40e1ee2de147629 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:12:19.939198 2195229 start.go:364] duration metric: took 42.626µs to acquireMachinesLock for "multinode-890146-m02"
	I0916 11:12:19.939222 2195229 start.go:96] Skipping create...Using existing machine configuration
	I0916 11:12:19.939230 2195229 fix.go:54] fixHost starting: m02
	I0916 11:12:19.939513 2195229 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Status}}
	I0916 11:12:19.957485 2195229 fix.go:112] recreateIfNeeded on multinode-890146-m02: state=Stopped err=<nil>
	W0916 11:12:19.957513 2195229 fix.go:138] unexpected machine state, will restart: <nil>
	I0916 11:12:19.959803 2195229 out.go:177] * Restarting existing docker container for "multinode-890146-m02" ...
	I0916 11:12:19.962031 2195229 cli_runner.go:164] Run: docker start multinode-890146-m02
	I0916 11:12:20.296149 2195229 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Status}}
	I0916 11:12:20.326816 2195229 kic.go:430] container "multinode-890146-m02" state is running.
	I0916 11:12:20.328480 2195229 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m02
	I0916 11:12:20.351799 2195229 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/config.json ...
	I0916 11:12:20.352093 2195229 machine.go:93] provisionDockerMachine start ...
	I0916 11:12:20.352168 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:12:20.376340 2195229 main.go:141] libmachine: Using SSH client type: native
	I0916 11:12:20.376592 2195229 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40757 <nil> <nil>}
	I0916 11:12:20.376603 2195229 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:12:20.377227 2195229 main.go:141] libmachine: Error dialing TCP: ssh: handshake failed: EOF
	I0916 11:12:23.518118 2195229 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146-m02
	
	I0916 11:12:23.518144 2195229 ubuntu.go:169] provisioning hostname "multinode-890146-m02"
	I0916 11:12:23.518213 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:12:23.535620 2195229 main.go:141] libmachine: Using SSH client type: native
	I0916 11:12:23.535861 2195229 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40757 <nil> <nil>}
	I0916 11:12:23.535878 2195229 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-890146-m02 && echo "multinode-890146-m02" | sudo tee /etc/hostname
	I0916 11:12:23.683661 2195229 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-890146-m02
	
	I0916 11:12:23.683748 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:12:23.701371 2195229 main.go:141] libmachine: Using SSH client type: native
	I0916 11:12:23.701621 2195229 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40757 <nil> <nil>}
	I0916 11:12:23.701644 2195229 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-890146-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-890146-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-890146-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:12:23.838890 2195229 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:12:23.838987 2195229 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 11:12:23.839020 2195229 ubuntu.go:177] setting up certificates
	I0916 11:12:23.839064 2195229 provision.go:84] configureAuth start
	I0916 11:12:23.839169 2195229 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m02
	I0916 11:12:23.857177 2195229 provision.go:143] copyHostCerts
	I0916 11:12:23.857217 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:12:23.857249 2195229 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 11:12:23.857256 2195229 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:12:23.857338 2195229 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 11:12:23.857428 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:12:23.857445 2195229 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 11:12:23.857451 2195229 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:12:23.857477 2195229 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 11:12:23.857520 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:12:23.857549 2195229 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 11:12:23.857554 2195229 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:12:23.857582 2195229 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 11:12:23.857648 2195229 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.multinode-890146-m02 san=[127.0.0.1 192.168.58.3 localhost minikube multinode-890146-m02]
	I0916 11:12:24.207408 2195229 provision.go:177] copyRemoteCerts
	I0916 11:12:24.207476 2195229 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:12:24.207528 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:12:24.224619 2195229 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40757 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:12:24.327831 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0916 11:12:24.327895 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 11:12:24.354641 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0916 11:12:24.354796 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1229 bytes)
	I0916 11:12:24.383539 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0916 11:12:24.383623 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 11:12:24.409539 2195229 provision.go:87] duration metric: took 570.446758ms to configureAuth
	I0916 11:12:24.409574 2195229 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:12:24.409838 2195229 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:12:24.409852 2195229 machine.go:96] duration metric: took 4.057750363s to provisionDockerMachine
	I0916 11:12:24.409861 2195229 start.go:293] postStartSetup for "multinode-890146-m02" (driver="docker")
	I0916 11:12:24.409872 2195229 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:12:24.409927 2195229 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:12:24.409973 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:12:24.427478 2195229 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40757 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:12:24.524237 2195229 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:12:24.527605 2195229 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.4 LTS"
	I0916 11:12:24.527625 2195229 command_runner.go:130] > NAME="Ubuntu"
	I0916 11:12:24.527633 2195229 command_runner.go:130] > VERSION_ID="22.04"
	I0916 11:12:24.527638 2195229 command_runner.go:130] > VERSION="22.04.4 LTS (Jammy Jellyfish)"
	I0916 11:12:24.527643 2195229 command_runner.go:130] > VERSION_CODENAME=jammy
	I0916 11:12:24.527647 2195229 command_runner.go:130] > ID=ubuntu
	I0916 11:12:24.527651 2195229 command_runner.go:130] > ID_LIKE=debian
	I0916 11:12:24.527655 2195229 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0916 11:12:24.527662 2195229 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0916 11:12:24.527668 2195229 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0916 11:12:24.527676 2195229 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0916 11:12:24.527680 2195229 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0916 11:12:24.527750 2195229 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:12:24.527774 2195229 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:12:24.527784 2195229 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:12:24.527792 2195229 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:12:24.527802 2195229 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 11:12:24.527863 2195229 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 11:12:24.527938 2195229 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 11:12:24.527945 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /etc/ssl/certs/20633262.pem
	I0916 11:12:24.528045 2195229 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:12:24.537131 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:12:24.564709 2195229 start.go:296] duration metric: took 154.831678ms for postStartSetup
	I0916 11:12:24.564819 2195229 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:12:24.564896 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:12:24.582054 2195229 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40757 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:12:24.676194 2195229 command_runner.go:130] > 21%
	I0916 11:12:24.676309 2195229 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:12:24.680837 2195229 command_runner.go:130] > 154G
	I0916 11:12:24.680874 2195229 fix.go:56] duration metric: took 4.74163167s for fixHost
	I0916 11:12:24.680885 2195229 start.go:83] releasing machines lock for "multinode-890146-m02", held for 4.741676576s
	I0916 11:12:24.680961 2195229 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m02
	I0916 11:12:24.700758 2195229 out.go:177] * Found network options:
	I0916 11:12:24.702993 2195229 out.go:177]   - NO_PROXY=192.168.58.2
	W0916 11:12:24.705317 2195229 proxy.go:119] fail to check proxy env: Error ip not in block
	W0916 11:12:24.705352 2195229 proxy.go:119] fail to check proxy env: Error ip not in block
	I0916 11:12:24.705424 2195229 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:12:24.705473 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:12:24.705759 2195229 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:12:24.705816 2195229 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:12:24.724941 2195229 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40757 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:12:24.733190 2195229 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40757 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:12:24.823180 2195229 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0916 11:12:24.823206 2195229 command_runner.go:130] >   Size: 78        	Blocks: 8          IO Block: 4096   regular file
	I0916 11:12:24.823218 2195229 command_runner.go:130] > Device: 10000ah/1048586d	Inode: 1335879     Links: 1
	I0916 11:12:24.823226 2195229 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:12:24.823233 2195229 command_runner.go:130] > Access: 2024-09-16 11:12:20.830736430 +0000
	I0916 11:12:24.823238 2195229 command_runner.go:130] > Modify: 2024-09-16 11:10:44.447259212 +0000
	I0916 11:12:24.823243 2195229 command_runner.go:130] > Change: 2024-09-16 11:10:44.447259212 +0000
	I0916 11:12:24.823249 2195229 command_runner.go:130] >  Birth: 2024-09-16 11:10:44.447259212 +0000
	I0916 11:12:24.823920 2195229 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 11:12:24.953034 2195229 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0916 11:12:24.956414 2195229 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:12:24.956497 2195229 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:12:24.965878 2195229 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0916 11:12:24.965945 2195229 start.go:495] detecting cgroup driver to use...
	I0916 11:12:24.965994 2195229 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:12:24.966051 2195229 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 11:12:24.979897 2195229 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 11:12:24.992955 2195229 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:12:24.993065 2195229 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:12:25.021532 2195229 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:12:25.040729 2195229 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:12:25.147695 2195229 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:12:25.243670 2195229 docker.go:233] disabling docker service ...
	I0916 11:12:25.243811 2195229 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:12:25.258232 2195229 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:12:25.270396 2195229 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:12:25.359693 2195229 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:12:25.449751 2195229 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:12:25.462304 2195229 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:12:25.478091 2195229 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0916 11:12:25.480051 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0916 11:12:25.491291 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 11:12:25.502546 2195229 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 11:12:25.502711 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 11:12:25.514883 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:12:25.528535 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 11:12:25.538488 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:12:25.550391 2195229 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:12:25.560134 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 11:12:25.572248 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 11:12:25.582199 2195229 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 11:12:25.593405 2195229 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:12:25.601373 2195229 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0916 11:12:25.602502 2195229 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:12:25.611632 2195229 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:12:25.703040 2195229 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 11:12:25.861061 2195229 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 11:12:25.861216 2195229 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 11:12:25.866185 2195229 command_runner.go:130] >   File: /run/containerd/containerd.sock
	I0916 11:12:25.866211 2195229 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0916 11:12:25.866219 2195229 command_runner.go:130] > Device: 100014h/1048596d	Inode: 172         Links: 1
	I0916 11:12:25.866226 2195229 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0916 11:12:25.866250 2195229 command_runner.go:130] > Access: 2024-09-16 11:12:25.834709060 +0000
	I0916 11:12:25.866261 2195229 command_runner.go:130] > Modify: 2024-09-16 11:12:25.794709279 +0000
	I0916 11:12:25.866267 2195229 command_runner.go:130] > Change: 2024-09-16 11:12:25.794709279 +0000
	I0916 11:12:25.866270 2195229 command_runner.go:130] >  Birth: -
	I0916 11:12:25.866289 2195229 start.go:563] Will wait 60s for crictl version
	I0916 11:12:25.866358 2195229 ssh_runner.go:195] Run: which crictl
	I0916 11:12:25.871263 2195229 command_runner.go:130] > /usr/bin/crictl
	I0916 11:12:25.871468 2195229 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:12:25.918824 2195229 command_runner.go:130] > Version:  0.1.0
	I0916 11:12:25.919058 2195229 command_runner.go:130] > RuntimeName:  containerd
	I0916 11:12:25.919127 2195229 command_runner.go:130] > RuntimeVersion:  1.7.22
	I0916 11:12:25.919367 2195229 command_runner.go:130] > RuntimeApiVersion:  v1
	I0916 11:12:25.922016 2195229 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 11:12:25.922135 2195229 ssh_runner.go:195] Run: containerd --version
	I0916 11:12:25.953072 2195229 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:12:25.954986 2195229 ssh_runner.go:195] Run: containerd --version
	I0916 11:12:25.979542 2195229 command_runner.go:130] > containerd containerd.io 1.7.22 7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c
	I0916 11:12:25.985972 2195229 out.go:177] * Preparing Kubernetes v1.31.1 on containerd 1.7.22 ...
	I0916 11:12:25.989177 2195229 out.go:177]   - env NO_PROXY=192.168.58.2
	I0916 11:12:25.991132 2195229 cli_runner.go:164] Run: docker network inspect multinode-890146 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:12:26.010242 2195229 ssh_runner.go:195] Run: grep 192.168.58.1	host.minikube.internal$ /etc/hosts
	I0916 11:12:26.014943 2195229 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.58.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:12:26.027611 2195229 mustload.go:65] Loading cluster: multinode-890146
	I0916 11:12:26.027870 2195229 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:12:26.028167 2195229 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:12:26.048767 2195229 host.go:66] Checking if "multinode-890146" exists ...
	I0916 11:12:26.049062 2195229 certs.go:68] Setting up /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146 for IP: 192.168.58.3
	I0916 11:12:26.049071 2195229 certs.go:194] generating shared ca certs ...
	I0916 11:12:26.049122 2195229 certs.go:226] acquiring lock for ca certs: {Name:mk1e273a7197d918ea18a75720b6b6301020c485 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:12:26.049252 2195229 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key
	I0916 11:12:26.049294 2195229 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key
	I0916 11:12:26.049305 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0916 11:12:26.049318 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0916 11:12:26.049328 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0916 11:12:26.049339 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0916 11:12:26.049396 2195229 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem (1338 bytes)
	W0916 11:12:26.049425 2195229 certs.go:480] ignoring /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326_empty.pem, impossibly tiny 0 bytes
	I0916 11:12:26.049434 2195229 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem (1679 bytes)
	I0916 11:12:26.049469 2195229 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem (1082 bytes)
	I0916 11:12:26.049497 2195229 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem (1123 bytes)
	I0916 11:12:26.049522 2195229 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem (1675 bytes)
	I0916 11:12:26.049567 2195229 certs.go:484] found cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:12:26.049595 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> /usr/share/ca-certificates/20633262.pem
	I0916 11:12:26.049608 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:12:26.049620 2195229 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem -> /usr/share/ca-certificates/2063326.pem
	I0916 11:12:26.049640 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0916 11:12:26.075888 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1679 bytes)
	I0916 11:12:26.111399 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0916 11:12:26.136889 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0916 11:12:26.164353 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /usr/share/ca-certificates/20633262.pem (1708 bytes)
	I0916 11:12:26.189378 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0916 11:12:26.214567 2195229 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/2063326.pem --> /usr/share/ca-certificates/2063326.pem (1338 bytes)
	I0916 11:12:26.239840 2195229 ssh_runner.go:195] Run: openssl version
	I0916 11:12:26.244985 2195229 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0916 11:12:26.245407 2195229 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0916 11:12:26.256741 2195229 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:12:26.261194 2195229 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:12:26.261267 2195229 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Sep 16 10:30 /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:12:26.261336 2195229 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0916 11:12:26.268130 2195229 command_runner.go:130] > b5213941
	I0916 11:12:26.268582 2195229 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0916 11:12:26.278077 2195229 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/2063326.pem && ln -fs /usr/share/ca-certificates/2063326.pem /etc/ssl/certs/2063326.pem"
	I0916 11:12:26.287828 2195229 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/2063326.pem
	I0916 11:12:26.291699 2195229 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:12:26.291772 2195229 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Sep 16 10:47 /usr/share/ca-certificates/2063326.pem
	I0916 11:12:26.291839 2195229 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/2063326.pem
	I0916 11:12:26.298460 2195229 command_runner.go:130] > 51391683
	I0916 11:12:26.298951 2195229 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/2063326.pem /etc/ssl/certs/51391683.0"
	I0916 11:12:26.308222 2195229 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/20633262.pem && ln -fs /usr/share/ca-certificates/20633262.pem /etc/ssl/certs/20633262.pem"
	I0916 11:12:26.318757 2195229 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/20633262.pem
	I0916 11:12:26.322312 2195229 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:12:26.322771 2195229 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Sep 16 10:47 /usr/share/ca-certificates/20633262.pem
	I0916 11:12:26.322848 2195229 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/20633262.pem
	I0916 11:12:26.330058 2195229 command_runner.go:130] > 3ec20f2e
	I0916 11:12:26.330550 2195229 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/20633262.pem /etc/ssl/certs/3ec20f2e.0"
	I0916 11:12:26.339887 2195229 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0916 11:12:26.343378 2195229 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:12:26.343421 2195229 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0916 11:12:26.343454 2195229 kubeadm.go:934] updating node {m02 192.168.58.3 8443 v1.31.1 containerd false true} ...
	I0916 11:12:26.343560 2195229 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.31.1/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-890146-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.58.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.31.1 ClusterName:multinode-890146 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:12:26.343628 2195229 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.31.1
	I0916 11:12:26.353066 2195229 command_runner.go:130] > kubeadm
	I0916 11:12:26.353088 2195229 command_runner.go:130] > kubectl
	I0916 11:12:26.353092 2195229 command_runner.go:130] > kubelet
	I0916 11:12:26.353113 2195229 binaries.go:44] Found k8s binaries, skipping transfer
	I0916 11:12:26.353184 2195229 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0916 11:12:26.364298 2195229 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (324 bytes)
	I0916 11:12:26.383728 2195229 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0916 11:12:26.402613 2195229 ssh_runner.go:195] Run: grep 192.168.58.2	control-plane.minikube.internal$ /etc/hosts
	I0916 11:12:26.406313 2195229 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.58.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:12:26.417462 2195229 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:12:26.516112 2195229 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:12:26.528575 2195229 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.58.3 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:false Worker:true}
	I0916 11:12:26.529031 2195229 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:12:26.531383 2195229 out.go:177] * Verifying Kubernetes components...
	I0916 11:12:26.533538 2195229 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:12:26.624817 2195229 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:12:26.637677 2195229 loader.go:395] Config loaded from file:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:12:26.637976 2195229 kapi.go:59] client config for multinode-890146: &rest.Config{Host:"https://192.168.58.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/multinode-890146/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil),
NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:12:26.638248 2195229 node_ready.go:35] waiting up to 6m0s for node "multinode-890146-m02" to be "Ready" ...
	I0916 11:12:26.638321 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:12:26.638336 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:26.638345 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:26.638350 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:26.640889 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:26.640955 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:26.640980 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:26.640996 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:26.641028 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:26.641047 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:26.641065 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:26 GMT
	I0916 11:12:26.641082 2195229 round_trippers.go:580]     Audit-Id: 7840143f-d979-4e77-87d6-21120cddb3eb
	I0916 11:12:26.641284 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"828","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metada
ta":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}}} [truncated 5312 chars]
	I0916 11:12:26.641671 2195229 node_ready.go:49] node "multinode-890146-m02" has status "Ready":"True"
	I0916 11:12:26.641691 2195229 node_ready.go:38] duration metric: took 3.425778ms for node "multinode-890146-m02" to be "Ready" ...
	I0916 11:12:26.641702 2195229 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:12:26.641770 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods
	I0916 11:12:26.641783 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:26.641791 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:26.641795 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:26.645303 2195229 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:12:26.645326 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:26.645334 2195229 round_trippers.go:580]     Audit-Id: 688078c8-a79c-4db9-93d8-c5d3648d7f60
	I0916 11:12:26.645347 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:26.645374 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:26.645383 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:26.645387 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:26.645389 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:26 GMT
	I0916 11:12:26.646454 2195229 request.go:1351] Response Body: {"kind":"PodList","apiVersion":"v1","metadata":{"resourceVersion":"1073"},"items":[{"metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},
"f:preferredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers" [truncated 91191 chars]
	I0916 11:12:26.650371 2195229 pod_ready.go:79] waiting up to 6m0s for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:26.650499 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:26.650511 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:26.650521 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:26.650527 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:26.653006 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:26.653034 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:26.653043 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:26.653049 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:26 GMT
	I0916 11:12:26.653054 2195229 round_trippers.go:580]     Audit-Id: 7026ece0-5187-4f87-939c-d8dc70ac69bb
	I0916 11:12:26.653057 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:26.653060 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:26.653063 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:26.653348 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:26.653910 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:26.653929 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:26.653938 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:26.653950 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:26.656025 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:26.656061 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:26.656070 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:26.656074 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:26.656077 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:26 GMT
	I0916 11:12:26.656079 2195229 round_trippers.go:580]     Audit-Id: 24b6df58-d41b-4b6d-ba0a-9d0e25581acf
	I0916 11:12:26.656083 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:26.656086 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:26.656411 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:27.151485 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:27.151511 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:27.151522 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:27.151527 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:27.153938 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:27.154007 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:27.154030 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:27 GMT
	I0916 11:12:27.154049 2195229 round_trippers.go:580]     Audit-Id: f4faf2c8-149b-47b4-8721-11e4b3e5dea7
	I0916 11:12:27.154077 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:27.154097 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:27.154112 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:27.154128 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:27.154284 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:27.154945 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:27.154964 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:27.154973 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:27.154977 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:27.157385 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:27.157413 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:27.157422 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:27 GMT
	I0916 11:12:27.157427 2195229 round_trippers.go:580]     Audit-Id: 63d1453f-5527-4a18-9bab-20787eb794dd
	I0916 11:12:27.157430 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:27.157434 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:27.157439 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:27.157444 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:27.157711 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:27.650752 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:27.650789 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:27.650799 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:27.650804 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:27.652989 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:27.653010 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:27.653018 2195229 round_trippers.go:580]     Audit-Id: 2edc3022-5361-4c55-a38e-08834ca9a6a3
	I0916 11:12:27.653022 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:27.653056 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:27.653066 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:27.653069 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:27.653072 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:27 GMT
	I0916 11:12:27.653411 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:27.654004 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:27.654022 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:27.654029 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:27.654034 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:27.656187 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:27.656216 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:27.656226 2195229 round_trippers.go:580]     Audit-Id: 63e7f432-ab59-487e-8a95-b0ede27b0ead
	I0916 11:12:27.656231 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:27.656236 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:27.656239 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:27.656243 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:27.656246 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:27 GMT
	I0916 11:12:27.656667 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:28.151364 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:28.151389 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:28.151400 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:28.151406 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:28.154044 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:28.154066 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:28.154075 2195229 round_trippers.go:580]     Audit-Id: bc67794f-3799-48cb-a9ed-b46abd1c11cd
	I0916 11:12:28.154080 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:28.154085 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:28.154089 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:28.154092 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:28.154094 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:28 GMT
	I0916 11:12:28.154321 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:28.154971 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:28.154991 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:28.154999 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:28.155004 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:28.156946 2195229 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:12:28.156967 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:28.156975 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:28.156979 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:28.156982 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:28 GMT
	I0916 11:12:28.156985 2195229 round_trippers.go:580]     Audit-Id: 6ae951d6-f3a5-45a6-801a-a7d300b5a49b
	I0916 11:12:28.156988 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:28.156990 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:28.157200 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:28.651048 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:28.651070 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:28.651079 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:28.651083 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:28.653740 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:28.653761 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:28.653770 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:28.653776 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:28.653779 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:28.653785 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:28 GMT
	I0916 11:12:28.653788 2195229 round_trippers.go:580]     Audit-Id: a0def33d-5995-4ba0-bf75-1b2eb95565b1
	I0916 11:12:28.653793 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:28.654337 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:28.655177 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:28.655200 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:28.655209 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:28.655220 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:28.657568 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:28.657600 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:28.657609 2195229 round_trippers.go:580]     Audit-Id: 1bc47375-587a-4569-8c18-60e7b6d4ef3d
	I0916 11:12:28.657615 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:28.657621 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:28.657624 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:28.657628 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:28.657633 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:28 GMT
	I0916 11:12:28.657818 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:28.658242 2195229 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:12:29.151513 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:29.151541 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:29.151551 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:29.151555 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:29.154149 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:29.154175 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:29.154183 2195229 round_trippers.go:580]     Audit-Id: c4ace1fd-75a5-4a8a-9d8e-03fefbce3a06
	I0916 11:12:29.154188 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:29.154191 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:29.154194 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:29.154197 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:29.154200 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:29 GMT
	I0916 11:12:29.154541 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:29.155201 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:29.155220 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:29.155234 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:29.155239 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:29.158826 2195229 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:12:29.158848 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:29.158856 2195229 round_trippers.go:580]     Audit-Id: 768fbe78-c67f-431d-9bfb-9b9f792c676e
	I0916 11:12:29.158861 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:29.158866 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:29.158869 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:29.158873 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:29.158876 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:29 GMT
	I0916 11:12:29.159288 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:29.650653 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:29.650709 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:29.650720 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:29.650724 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:29.653076 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:29.653150 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:29.653173 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:29 GMT
	I0916 11:12:29.653192 2195229 round_trippers.go:580]     Audit-Id: ac1978f0-7af0-4ae1-a127-c267bf646d89
	I0916 11:12:29.653224 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:29.653247 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:29.653263 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:29.653283 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:29.653535 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:29.654149 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:29.654167 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:29.654176 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:29.654179 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:29.656477 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:29.656498 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:29.656506 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:29.656511 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:29 GMT
	I0916 11:12:29.656514 2195229 round_trippers.go:580]     Audit-Id: c19c1e17-b7c8-405b-a9ef-a81ddf1c2b0c
	I0916 11:12:29.656518 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:29.656521 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:29.656526 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:29.656766 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:30.150960 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:30.150986 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:30.150996 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:30.151000 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:30.153933 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:30.153960 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:30.153970 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:30.153974 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:30.153979 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:30 GMT
	I0916 11:12:30.153983 2195229 round_trippers.go:580]     Audit-Id: 685958c8-ea02-4a46-b7c6-894830a5d322
	I0916 11:12:30.153988 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:30.153991 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:30.154175 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:30.154886 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:30.154910 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:30.154919 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:30.154925 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:30.157511 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:30.157535 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:30.157543 2195229 round_trippers.go:580]     Audit-Id: 4cc0826d-0af1-4032-ac1f-3e3c573e4063
	I0916 11:12:30.157548 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:30.157551 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:30.157554 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:30.157557 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:30.157560 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:30 GMT
	I0916 11:12:30.157733 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:30.650839 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:30.650874 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:30.650885 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:30.650891 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:30.653287 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:30.653311 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:30.653325 2195229 round_trippers.go:580]     Audit-Id: 7fd3d896-a66b-4c8d-971b-b2a51b66dff4
	I0916 11:12:30.653329 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:30.653334 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:30.653337 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:30.653341 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:30.653344 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:30 GMT
	I0916 11:12:30.653816 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:30.654404 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:30.654414 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:30.654423 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:30.654427 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:30.656550 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:30.656572 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:30.656580 2195229 round_trippers.go:580]     Audit-Id: 159429c3-9c15-4cc8-a327-7c00c39025d0
	I0916 11:12:30.656585 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:30.656588 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:30.656590 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:30.656593 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:30.656596 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:30 GMT
	I0916 11:12:30.656941 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:31.150958 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:31.150987 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:31.150995 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:31.151000 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:31.153284 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:31.153309 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:31.153317 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:31.153321 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:31.153324 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:31 GMT
	I0916 11:12:31.153327 2195229 round_trippers.go:580]     Audit-Id: 4c3d8d63-19e2-4a27-bd83-bddc28e548b3
	I0916 11:12:31.153329 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:31.153332 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:31.153692 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:31.154254 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:31.154273 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:31.154282 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:31.154290 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:31.156270 2195229 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:12:31.156319 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:31.156340 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:31.156357 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:31.156375 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:31 GMT
	I0916 11:12:31.156401 2195229 round_trippers.go:580]     Audit-Id: 75ec66ab-8ee3-48df-a16e-da6df1575456
	I0916 11:12:31.156430 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:31.156448 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:31.156660 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:31.157053 2195229 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:12:31.651083 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:31.651106 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:31.651116 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:31.651121 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:31.653607 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:31.653629 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:31.653638 2195229 round_trippers.go:580]     Audit-Id: f12d445a-1024-4f8b-826c-05e899988999
	I0916 11:12:31.653641 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:31.653644 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:31.653647 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:31.653650 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:31.653653 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:31 GMT
	I0916 11:12:31.653987 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:31.654575 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:31.654595 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:31.654604 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:31.654609 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:31.656744 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:31.656829 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:31.656852 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:31 GMT
	I0916 11:12:31.656895 2195229 round_trippers.go:580]     Audit-Id: c4b665e7-98ab-44ac-8c78-61561d80ee35
	I0916 11:12:31.656916 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:31.656931 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:31.656963 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:31.656983 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:31.657128 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:32.151388 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:32.151418 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:32.151429 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:32.151435 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:32.154057 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:32.154082 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:32.154091 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:32.154095 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:32.154100 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:32.154103 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:32 GMT
	I0916 11:12:32.154106 2195229 round_trippers.go:580]     Audit-Id: 733a9290-5ec8-4096-8799-dfc6dde7cdd5
	I0916 11:12:32.154109 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:32.154340 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:32.155049 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:32.155069 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:32.155078 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:32.155082 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:32.157166 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:32.157190 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:32.157206 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:32.157209 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:32.157213 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:32.157216 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:32 GMT
	I0916 11:12:32.157218 2195229 round_trippers.go:580]     Audit-Id: 5fedc1d5-611f-4947-b483-96f376cb8ac5
	I0916 11:12:32.157221 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:32.157386 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:32.651270 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:32.651297 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:32.651307 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:32.651312 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:32.654301 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:32.654327 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:32.654337 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:32.654341 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:32.654344 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:32.654365 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:32.654369 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:32 GMT
	I0916 11:12:32.654372 2195229 round_trippers.go:580]     Audit-Id: 3fb0c1d9-f621-4709-9c0b-6ae3ef4544d5
	I0916 11:12:32.655228 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:32.655849 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:32.655869 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:32.655878 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:32.655884 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:32.658654 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:32.658682 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:32.658693 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:32.658697 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:32 GMT
	I0916 11:12:32.658700 2195229 round_trippers.go:580]     Audit-Id: 32335918-e418-406b-ba65-1fe94fc3f38f
	I0916 11:12:32.658703 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:32.658706 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:32.658724 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:32.659163 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:33.151178 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:33.151250 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:33.151276 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:33.151294 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:33.154089 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:33.154164 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:33.154192 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:33.154209 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:33.154239 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:33.154258 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:33.154273 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:33 GMT
	I0916 11:12:33.154289 2195229 round_trippers.go:580]     Audit-Id: b1607c57-ed12-4965-a387-bb0cf3f8bc36
	I0916 11:12:33.154437 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:33.155120 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:33.155169 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:33.155191 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:33.155211 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:33.157516 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:33.157580 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:33.157602 2195229 round_trippers.go:580]     Audit-Id: 8edc8e33-de94-44b0-aac4-2f208caed0c3
	I0916 11:12:33.157618 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:33.157636 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:33.157670 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:33.157688 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:33.157703 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:33 GMT
	I0916 11:12:33.157920 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:33.158370 2195229 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:12:33.651385 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:33.651461 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:33.651485 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:33.651503 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:33.654268 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:33.654392 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:33.654432 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:33 GMT
	I0916 11:12:33.654455 2195229 round_trippers.go:580]     Audit-Id: 1bf863f6-fb43-4b61-8326-8da0cde2ac1f
	I0916 11:12:33.654474 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:33.654507 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:33.654528 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:33.654545 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:33.654789 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:33.655480 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:33.655531 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:33.655556 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:33.655576 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:33.657995 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:33.658042 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:33.658076 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:33.658096 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:33.658112 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:33 GMT
	I0916 11:12:33.658129 2195229 round_trippers.go:580]     Audit-Id: 15c8b875-fa67-43dd-9e26-7c175cc8d7a7
	I0916 11:12:33.658158 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:33.658179 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:33.658813 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:34.150897 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:34.150924 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:34.150937 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:34.150941 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:34.153562 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:34.153655 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:34.153693 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:34.153729 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:34.153760 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:34 GMT
	I0916 11:12:34.153777 2195229 round_trippers.go:580]     Audit-Id: ae9ab162-192f-4f2d-93a3-8e8f38c5d885
	I0916 11:12:34.153812 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:34.153829 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:34.153963 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:34.154577 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:34.154595 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:34.154617 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:34.154623 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:34.156801 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:34.156880 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:34.156920 2195229 round_trippers.go:580]     Audit-Id: 97f27734-ce76-426d-8800-7aab70b1ca83
	I0916 11:12:34.156940 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:34.156957 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:34.156997 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:34.157027 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:34.157065 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:34 GMT
	I0916 11:12:34.157324 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:34.651609 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:34.651639 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:34.651649 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:34.651654 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:34.654064 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:34.654091 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:34.654099 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:34.654104 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:34.654107 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:34.654112 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:34.654119 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:34 GMT
	I0916 11:12:34.654122 2195229 round_trippers.go:580]     Audit-Id: d5772cff-2525-4896-939a-1f4fce8077a6
	I0916 11:12:34.654359 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:34.654997 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:34.655013 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:34.655022 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:34.655029 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:34.656870 2195229 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:12:34.656890 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:34.656899 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:34.656903 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:34.656907 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:34.656911 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:34.656915 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:34 GMT
	I0916 11:12:34.656918 2195229 round_trippers.go:580]     Audit-Id: 7ad35126-a701-4ec1-aec2-be1f77cd3448
	I0916 11:12:34.657205 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:35.150907 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:35.150934 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:35.150943 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:35.150948 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:35.153494 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:35.153521 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:35.153531 2195229 round_trippers.go:580]     Audit-Id: 6491e4c2-572d-406e-858d-509254c16fbd
	I0916 11:12:35.153536 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:35.153540 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:35.153544 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:35.153548 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:35.153551 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:35 GMT
	I0916 11:12:35.153879 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:35.154507 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:35.154527 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:35.154536 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:35.154542 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:35.160398 2195229 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:12:35.160479 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:35.160501 2195229 round_trippers.go:580]     Audit-Id: 23179305-708c-40bc-9e56-116b001d1f97
	I0916 11:12:35.160516 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:35.160547 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:35.160570 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:35.160620 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:35.160649 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:35 GMT
	I0916 11:12:35.160804 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:35.161214 2195229 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:12:35.651345 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:35.651368 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:35.651379 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:35.651384 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:35.653639 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:35.653667 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:35.653675 2195229 round_trippers.go:580]     Audit-Id: 8bba091f-6ca7-4517-9527-a69b588f8cb1
	I0916 11:12:35.653680 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:35.653683 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:35.653685 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:35.653707 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:35.653712 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:35 GMT
	I0916 11:12:35.654067 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:35.654661 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:35.654706 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:35.654714 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:35.654718 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:35.656734 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:35.656754 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:35.656762 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:35.656768 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:35.656781 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:35 GMT
	I0916 11:12:35.656784 2195229 round_trippers.go:580]     Audit-Id: ce3f9a6c-f461-49ad-b002-d38174d0a035
	I0916 11:12:35.656787 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:35.656790 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:35.656945 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:36.151243 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:36.151269 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:36.151278 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:36.151283 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:36.153842 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:36.153864 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:36.153873 2195229 round_trippers.go:580]     Audit-Id: 596238e6-677f-4ca2-96e7-effe51376bd6
	I0916 11:12:36.153878 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:36.153894 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:36.153897 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:36.153900 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:36.153904 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:36 GMT
	I0916 11:12:36.154028 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:36.154638 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:36.154654 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:36.154664 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:36.154668 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:36.156917 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:36.156940 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:36.156948 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:36.156952 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:36.156956 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:36.156959 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:36 GMT
	I0916 11:12:36.156962 2195229 round_trippers.go:580]     Audit-Id: d5752f0d-bbcb-4642-819e-278524113f6d
	I0916 11:12:36.156965 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:36.157257 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:36.650836 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:36.650863 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:36.650873 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:36.650878 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:36.653251 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:36.653273 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:36.653282 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:36.653288 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:36.653293 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:36.653297 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:36 GMT
	I0916 11:12:36.653300 2195229 round_trippers.go:580]     Audit-Id: 954b4712-d7b7-4b5b-919e-0240331c3df4
	I0916 11:12:36.653303 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:36.653780 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:36.654354 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:36.654372 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:36.654380 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:36.654386 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:36.656670 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:36.656695 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:36.656704 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:36.656714 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:36.656741 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:36.656753 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:36 GMT
	I0916 11:12:36.656756 2195229 round_trippers.go:580]     Audit-Id: 81e56b71-820c-4b83-85db-1380629812c2
	I0916 11:12:36.656759 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:36.656960 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:37.151161 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:37.151186 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:37.151196 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:37.151202 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:37.153721 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:37.153749 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:37.153757 2195229 round_trippers.go:580]     Audit-Id: 8fc3236b-8b36-4227-8806-903509c084ba
	I0916 11:12:37.153763 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:37.153767 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:37.153770 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:37.153773 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:37.153777 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:37 GMT
	I0916 11:12:37.154339 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:37.154977 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:37.154997 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:37.155006 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:37.155012 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:37.157365 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:37.157389 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:37.157398 2195229 round_trippers.go:580]     Audit-Id: 2288263a-cf4d-4c6b-bb30-7130ad134224
	I0916 11:12:37.157402 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:37.157407 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:37.157436 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:37.157443 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:37.157447 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:37 GMT
	I0916 11:12:37.157616 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:37.651056 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:37.651085 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:37.651093 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:37.651117 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:37.653674 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:37.653756 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:37.653796 2195229 round_trippers.go:580]     Audit-Id: b54f3269-2964-4897-b6b2-aeb0244d930f
	I0916 11:12:37.653830 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:37.653850 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:37.653869 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:37.653919 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:37.653934 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:37 GMT
	I0916 11:12:37.654098 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:37.654771 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:37.654790 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:37.654799 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:37.654804 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:37.657071 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:37.657129 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:37.657151 2195229 round_trippers.go:580]     Audit-Id: 0e47ddf0-c6c7-4421-a885-6aefd779eed1
	I0916 11:12:37.657170 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:37.657186 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:37.657213 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:37.657234 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:37.657250 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:37 GMT
	I0916 11:12:37.657419 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:37.657822 2195229 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:12:38.150869 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:38.150893 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:38.150902 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:38.150909 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:38.153387 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:38.153412 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:38.153426 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:38 GMT
	I0916 11:12:38.153431 2195229 round_trippers.go:580]     Audit-Id: 86c1412b-1542-4219-b5af-66d341c32561
	I0916 11:12:38.153436 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:38.153439 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:38.153441 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:38.153444 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:38.153743 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:38.154323 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:38.154339 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:38.154348 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:38.154354 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:38.156460 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:38.156481 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:38.156489 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:38.156493 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:38.156496 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:38.156499 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:38.156502 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:38 GMT
	I0916 11:12:38.156507 2195229 round_trippers.go:580]     Audit-Id: 66beb5ad-8d77-455a-ab9c-c00b6a33cb40
	I0916 11:12:38.156734 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:38.650905 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:38.650931 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:38.650941 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:38.650946 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:38.653363 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:38.653391 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:38.653400 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:38.653405 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:38.653435 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:38.653448 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:38.653458 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:38 GMT
	I0916 11:12:38.653461 2195229 round_trippers.go:580]     Audit-Id: 8f9dc819-4f4f-4cb1-ad2e-b85eeaab0d0e
	I0916 11:12:38.653890 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:38.654655 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:38.654730 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:38.654758 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:38.654765 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:38.656955 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:38.656977 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:38.656985 2195229 round_trippers.go:580]     Audit-Id: 3e6f3df6-8db9-465d-8308-82ac66d7643d
	I0916 11:12:38.656990 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:38.656994 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:38.657026 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:38.657054 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:38.657072 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:38 GMT
	I0916 11:12:38.657248 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:39.151448 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:39.151476 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:39.151486 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:39.151492 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:39.154178 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:39.154264 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:39.154287 2195229 round_trippers.go:580]     Audit-Id: 7ea1f8cb-b490-4073-9ff5-384fdfe4e6ec
	I0916 11:12:39.154302 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:39.154307 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:39.154312 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:39.154316 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:39.154337 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:39 GMT
	I0916 11:12:39.154518 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:39.155147 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:39.155178 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:39.155188 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:39.155193 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:39.157474 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:39.157498 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:39.157507 2195229 round_trippers.go:580]     Audit-Id: db87f119-6fc4-4811-b304-2a2284a252e1
	I0916 11:12:39.157511 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:39.157514 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:39.157519 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:39.157524 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:39.157527 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:39 GMT
	I0916 11:12:39.157788 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:39.650870 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:39.650893 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:39.650903 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:39.650908 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:39.653417 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:39.653485 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:39.653506 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:39.653524 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:39 GMT
	I0916 11:12:39.653541 2195229 round_trippers.go:580]     Audit-Id: 13ba35a2-812a-481d-804f-9aeafe5b335c
	I0916 11:12:39.653568 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:39.653591 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:39.653609 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:39.653769 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:39.654395 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:39.654419 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:39.654427 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:39.654433 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:39.656718 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:39.656740 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:39.656749 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:39.656752 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:39.656756 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:39.656759 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:39 GMT
	I0916 11:12:39.656762 2195229 round_trippers.go:580]     Audit-Id: 146e5c27-49d4-45d9-b9df-68f61733a864
	I0916 11:12:39.656765 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:39.656890 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:40.150863 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:40.150889 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:40.150899 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:40.150905 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:40.153466 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:40.153493 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:40.153502 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:40.153507 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:40.153511 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:40.153515 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:40 GMT
	I0916 11:12:40.153518 2195229 round_trippers.go:580]     Audit-Id: 93dd3fb7-e850-48da-ab77-d1d01706b1b7
	I0916 11:12:40.153521 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:40.153999 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:40.154666 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:40.154759 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:40.154784 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:40.154793 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:40.157535 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:40.157613 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:40.157638 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:40.157687 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:40 GMT
	I0916 11:12:40.157698 2195229 round_trippers.go:580]     Audit-Id: 427edfae-d71f-4666-98a5-3772b04b7404
	I0916 11:12:40.157702 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:40.157706 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:40.157709 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:40.157838 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:40.158286 2195229 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:12:40.651357 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:40.651381 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:40.651389 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:40.651393 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:40.653852 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:40.653890 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:40.653900 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:40.653903 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:40 GMT
	I0916 11:12:40.653906 2195229 round_trippers.go:580]     Audit-Id: 2885c97b-4717-4e97-9fa9-4462f7a5bdae
	I0916 11:12:40.653910 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:40.653913 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:40.653919 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:40.654186 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:40.654800 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:40.654852 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:40.654867 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:40.654872 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:40.656974 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:40.657032 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:40.657056 2195229 round_trippers.go:580]     Audit-Id: 630328da-d56f-41e9-86bb-03f43fd2cf1d
	I0916 11:12:40.657074 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:40.657088 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:40.657122 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:40.657138 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:40.657154 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:40 GMT
	I0916 11:12:40.657300 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:41.151103 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:41.151129 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:41.151141 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:41.151145 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:41.153463 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:41.153522 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:41.153530 2195229 round_trippers.go:580]     Audit-Id: 1dd28138-c51f-4970-a91c-97bed6481d0b
	I0916 11:12:41.153536 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:41.153540 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:41.153544 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:41.153558 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:41.153565 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:41 GMT
	I0916 11:12:41.153688 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:41.154279 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:41.154297 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:41.154305 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:41.154310 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:41.156526 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:41.156591 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:41.156616 2195229 round_trippers.go:580]     Audit-Id: 6db66d0f-ed27-4ea5-82d5-e5e8965393fc
	I0916 11:12:41.156633 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:41.156664 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:41.156684 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:41.156700 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:41.156715 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:41 GMT
	I0916 11:12:41.156863 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:41.651409 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:41.651494 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:41.651515 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:41.651519 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:41.654308 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:41.654343 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:41.654363 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:41.654370 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:41.654374 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:41.654379 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:41 GMT
	I0916 11:12:41.654382 2195229 round_trippers.go:580]     Audit-Id: 37ee64a3-c604-40d5-82c5-1fd9f2c4ac60
	I0916 11:12:41.654389 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:41.654877 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:41.655512 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:41.655533 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:41.655541 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:41.655548 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:41.657599 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:41.657621 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:41.657629 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:41.657635 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:41 GMT
	I0916 11:12:41.657638 2195229 round_trippers.go:580]     Audit-Id: 8645242f-e830-443b-81cf-abfcd44abbb3
	I0916 11:12:41.657642 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:41.657644 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:41.657647 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:41.657937 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:42.150997 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:42.151028 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:42.151039 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:42.151044 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:42.154108 2195229 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:12:42.154215 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:42.154240 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:42.154260 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:42.154295 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:42 GMT
	I0916 11:12:42.154318 2195229 round_trippers.go:580]     Audit-Id: 4a01cb16-9fea-40d0-91e7-802e19ca8829
	I0916 11:12:42.154335 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:42.154349 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:42.154620 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:42.155450 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:42.155472 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:42.155482 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:42.155486 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:42.158090 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:42.158169 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:42.158195 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:42.158214 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:42.158226 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:42.158242 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:42.158247 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:42 GMT
	I0916 11:12:42.158274 2195229 round_trippers.go:580]     Audit-Id: 5343e996-abbc-4033-858e-b14b04ab2b88
	I0916 11:12:42.158451 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:42.159001 2195229 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:12:42.651298 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:42.651319 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:42.651329 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:42.651335 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:42.653653 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:42.653677 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:42.653687 2195229 round_trippers.go:580]     Audit-Id: 1feb078c-b12f-42bd-88b0-2112578dcd27
	I0916 11:12:42.653693 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:42.653695 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:42.653698 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:42.653701 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:42.653703 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:42 GMT
	I0916 11:12:42.654120 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:42.654775 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:42.654796 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:42.654804 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:42.654810 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:42.657044 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:42.657064 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:42.657073 2195229 round_trippers.go:580]     Audit-Id: 60b3f14d-cdd9-44d5-86fd-aaebf3f6ddcf
	I0916 11:12:42.657078 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:42.657082 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:42.657086 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:42.657090 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:42.657093 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:42 GMT
	I0916 11:12:42.657247 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:43.151387 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:43.151410 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:43.151421 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:43.151427 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:43.153706 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:43.153775 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:43.153797 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:43.153815 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:43.153829 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:43 GMT
	I0916 11:12:43.153861 2195229 round_trippers.go:580]     Audit-Id: c1bb736f-c7eb-4886-ad8c-4253f8207cbc
	I0916 11:12:43.153877 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:43.153894 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:43.154033 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:43.154640 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:43.154658 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:43.154667 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:43.154671 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:43.156990 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:43.157011 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:43.157024 2195229 round_trippers.go:580]     Audit-Id: c426266d-cc19-417c-8395-eb9a5847751b
	I0916 11:12:43.157032 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:43.157036 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:43.157040 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:43.157043 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:43.157047 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:43 GMT
	I0916 11:12:43.157359 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:43.651636 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:43.651659 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:43.651668 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:43.651673 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:43.654047 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:43.654105 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:43.654128 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:43.654146 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:43.654176 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:43.654195 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:43.654211 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:43 GMT
	I0916 11:12:43.654229 2195229 round_trippers.go:580]     Audit-Id: d2f5077c-a14e-400b-866c-a20d5597f6b7
	I0916 11:12:43.654416 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:43.655031 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:43.655050 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:43.655069 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:43.655073 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:43.656967 2195229 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:12:43.656983 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:43.656991 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:43.656996 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:43 GMT
	I0916 11:12:43.656999 2195229 round_trippers.go:580]     Audit-Id: 4242fdc4-3958-4dfc-92a7-3a19764f605b
	I0916 11:12:43.657002 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:43.657005 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:43.657008 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:43.657151 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:44.150664 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:44.150783 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:44.150793 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:44.150799 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:44.153170 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:44.153191 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:44.153199 2195229 round_trippers.go:580]     Audit-Id: 3df98992-4e02-440e-af2d-9bd5145e41d3
	I0916 11:12:44.153203 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:44.153206 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:44.153208 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:44.153212 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:44.153214 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:44 GMT
	I0916 11:12:44.153323 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:44.153894 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:44.153910 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:44.153918 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:44.153922 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:44.155995 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:44.156046 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:44.156056 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:44.156061 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:44.156065 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:44 GMT
	I0916 11:12:44.156068 2195229 round_trippers.go:580]     Audit-Id: 617c5bf0-1bce-4094-9a3b-b99b5d535a73
	I0916 11:12:44.156071 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:44.156073 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:44.156335 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:44.651554 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:44.651586 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:44.651596 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:44.651600 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:44.654184 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:44.654214 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:44.654223 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:44.654227 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:44.654231 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:44.654234 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:44.654238 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:44 GMT
	I0916 11:12:44.654244 2195229 round_trippers.go:580]     Audit-Id: 78ae30dd-246c-456f-9780-7c877dee3f6b
	I0916 11:12:44.654509 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:44.655135 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:44.655156 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:44.655167 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:44.655177 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:44.657421 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:44.657443 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:44.657450 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:44.657456 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:44.657459 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:44.657461 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:44.657464 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:44 GMT
	I0916 11:12:44.657468 2195229 round_trippers.go:580]     Audit-Id: f05de7ca-e870-4843-80dd-827c2643f410
	I0916 11:12:44.657905 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:44.658327 2195229 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:12:45.150990 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:45.151020 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:45.151033 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:45.151039 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:45.156206 2195229 round_trippers.go:574] Response Status: 200 OK in 5 milliseconds
	I0916 11:12:45.156232 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:45.156241 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:45.156246 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:45.156250 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:45.156253 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:45 GMT
	I0916 11:12:45.156257 2195229 round_trippers.go:580]     Audit-Id: b8b1575a-8af6-43a4-9b39-5196138675db
	I0916 11:12:45.156260 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:45.157417 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:45.158203 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:45.158222 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:45.158232 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:45.158243 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:45.161549 2195229 round_trippers.go:574] Response Status: 200 OK in 3 milliseconds
	I0916 11:12:45.161579 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:45.161591 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:45.161597 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:45.161603 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:45.161607 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:45 GMT
	I0916 11:12:45.161612 2195229 round_trippers.go:580]     Audit-Id: 0ac8725c-51dc-484e-90cf-66b1e03b3751
	I0916 11:12:45.161617 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:45.162169 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:45.650838 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:45.650868 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:45.650879 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:45.650885 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:45.653285 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:45.653311 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:45.653344 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:45 GMT
	I0916 11:12:45.653348 2195229 round_trippers.go:580]     Audit-Id: bafbf322-0659-4e25-863b-12d1a2f37dbc
	I0916 11:12:45.653350 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:45.653354 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:45.653357 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:45.653359 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:45.653655 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:45.654286 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:45.654302 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:45.654312 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:45.654316 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:45.656392 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:45.656411 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:45.656419 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:45 GMT
	I0916 11:12:45.656424 2195229 round_trippers.go:580]     Audit-Id: 775fb4a1-679e-47a7-8c95-f02059e47f10
	I0916 11:12:45.656428 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:45.656431 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:45.656434 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:45.656436 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:45.656555 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:46.150732 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:46.150758 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:46.150768 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:46.150775 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:46.153179 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:46.153201 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:46.153212 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:46.153217 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:46.153222 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:46 GMT
	I0916 11:12:46.153227 2195229 round_trippers.go:580]     Audit-Id: 8d011389-dca7-462d-a0c1-1a2eea0403f5
	I0916 11:12:46.153230 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:46.153233 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:46.153352 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:46.153932 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:46.153943 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:46.153951 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:46.153955 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:46.156215 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:46.156239 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:46.156248 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:46.156257 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:46.156261 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:46.156265 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:46.156268 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:46 GMT
	I0916 11:12:46.156271 2195229 round_trippers.go:580]     Audit-Id: 1ead6d26-dd26-4cc9-8fd1-854947a335fe
	I0916 11:12:46.156734 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:46.651423 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:46.651450 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:46.651460 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:46.651465 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:46.653939 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:46.653979 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:46.653988 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:46 GMT
	I0916 11:12:46.653996 2195229 round_trippers.go:580]     Audit-Id: b1eaac92-8094-476b-bba9-e41db2fcda85
	I0916 11:12:46.654000 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:46.654004 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:46.654009 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:46.654012 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:46.654332 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:46.655042 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:46.655059 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:46.655068 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:46.655078 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:46.657470 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:46.657493 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:46.657501 2195229 round_trippers.go:580]     Audit-Id: fb232be7-4f8b-41f8-9fc1-1ff665a3d5df
	I0916 11:12:46.657507 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:46.657510 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:46.657514 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:46.657517 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:46.657520 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:46 GMT
	I0916 11:12:46.657826 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:47.150881 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:47.150906 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:47.150915 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:47.150919 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:47.153312 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:47.153335 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:47.153343 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:47.153347 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:47.153351 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:47.153354 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:47.153357 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:47 GMT
	I0916 11:12:47.153361 2195229 round_trippers.go:580]     Audit-Id: c39578c8-21ae-4666-9afb-669cb57efd90
	I0916 11:12:47.153583 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1051","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6923 chars]
	I0916 11:12:47.154176 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:47.154197 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:47.154206 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:47.154211 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:47.156187 2195229 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:12:47.156205 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:47.156213 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:47.156217 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:47.156221 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:47.156224 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:47 GMT
	I0916 11:12:47.156227 2195229 round_trippers.go:580]     Audit-Id: ec52ebfb-0a0a-420f-b77e-6c8f47a5f5b8
	I0916 11:12:47.156230 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:47.156334 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:47.156704 2195229 pod_ready.go:103] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"False"
	I0916 11:12:47.651508 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/coredns-7c65d6cfc9-vp22b
	I0916 11:12:47.651551 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:47.651564 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:47.651569 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:47.654217 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:47.654247 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:47.654256 2195229 round_trippers.go:580]     Audit-Id: 83f32ba8-8870-401b-ab7f-891da5612327
	I0916 11:12:47.654259 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:47.654262 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:47.654265 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:47.654268 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:47.654271 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:47 GMT
	I0916 11:12:47.654505 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"coredns-7c65d6cfc9-vp22b","generateName":"coredns-7c65d6cfc9-","namespace":"kube-system","uid":"a6adb735-448b-480b-aba1-3ce4d56c6fc7","resourceVersion":"1140","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"k8s-app":"kube-dns","pod-template-hash":"7c65d6cfc9"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"ReplicaSet","name":"coredns-7c65d6cfc9","uid":"5d467f48-bbcc-4946-9ba8-7b587a02872b","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:k8s-app":{},"f:pod-template-hash":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"5d467f48-bbcc-4946-9ba8-7b587a02872b\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:podAntiAffinity":{".":{},"f:preferredDuringSchedulingIgnoredDuringExecution":
{}}},"f:containers":{"k:{\"name\":\"coredns\"}":{".":{},"f:args":{},"f: [truncated 6694 chars]
	I0916 11:12:47.655108 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:47.655128 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:47.655137 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:47.655148 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:47.656985 2195229 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:12:47.657001 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:47.657009 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:47.657013 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:47.657018 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:47.657021 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:47.657026 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:47 GMT
	I0916 11:12:47.657030 2195229 round_trippers.go:580]     Audit-Id: 0df7a68a-ab4a-4ec8-9b76-68f94c7c2ef5
	I0916 11:12:47.657148 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:47.657501 2195229 pod_ready.go:93] pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:47.657513 2195229 pod_ready.go:82] duration metric: took 21.007108682s for pod "coredns-7c65d6cfc9-vp22b" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:47.657523 2195229 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:47.657611 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-890146
	I0916 11:12:47.657617 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:47.657625 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:47.657630 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:47.659628 2195229 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:12:47.659739 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:47.659750 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:47.659755 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:47 GMT
	I0916 11:12:47.659759 2195229 round_trippers.go:580]     Audit-Id: f9b92386-cf84-48af-9051-d9916c401645
	I0916 11:12:47.659764 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:47.659767 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:47.659770 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:47.659896 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"etcd-multinode-890146","namespace":"kube-system","uid":"59c960ab-f6dd-4ed3-856c-ba2a02295b12","resourceVersion":"1075","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"etcd","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/etcd.advertise-client-urls":"https://192.168.58.2:2379","kubernetes.io/config.hash":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.mirror":"addcbe15d5cf59de690537b86e0ce876","kubernetes.io/config.seen":"2024-09-16T11:07:32.783608135Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kubernetes.io/etcd.advertise-c
lient-urls":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config [truncated 6654 chars]
	I0916 11:12:47.660437 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:47.660457 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:47.660465 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:47.660486 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:47.662603 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:47.662625 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:47.662637 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:47.662641 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:47.662650 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:47.662654 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:47.662660 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:47 GMT
	I0916 11:12:47.662663 2195229 round_trippers.go:580]     Audit-Id: 879b0e1b-7732-4530-b037-5b2ed0b6623f
	I0916 11:12:47.663158 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:47.663634 2195229 pod_ready.go:93] pod "etcd-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:47.663655 2195229 pod_ready.go:82] duration metric: took 6.124734ms for pod "etcd-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:47.663675 2195229 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:47.663744 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-890146
	I0916 11:12:47.663754 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:47.663763 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:47.663767 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:47.666037 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:47.666061 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:47.666070 2195229 round_trippers.go:580]     Audit-Id: 5bc812a2-8fae-4e5d-babe-0edaae822402
	I0916 11:12:47.666075 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:47.666079 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:47.666082 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:47.666085 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:47.666089 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:47 GMT
	I0916 11:12:47.666481 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-apiserver-multinode-890146","namespace":"kube-system","uid":"9846229d-7227-453f-8c30-697aaba61648","resourceVersion":"1070","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-apiserver","tier":"control-plane"},"annotations":{"kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint":"192.168.58.2:8443","kubernetes.io/config.hash":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.mirror":"e9853b0a0cf2a497fe455d2a5a3241a9","kubernetes.io/config.seen":"2024-09-16T11:07:32.783610957Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubeadm.kub
ernetes.io/kube-apiserver.advertise-address.endpoint":{},"f:kubernetes. [truncated 8732 chars]
	I0916 11:12:47.667107 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:47.667127 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:47.667137 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:47.667149 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:47.669327 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:47.669344 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:47.669352 2195229 round_trippers.go:580]     Audit-Id: 0ad0693c-faa5-423f-9d7e-1ca751be7fdc
	I0916 11:12:47.669355 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:47.669358 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:47.669360 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:47.669364 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:47.669366 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:47 GMT
	I0916 11:12:47.669540 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:47.669952 2195229 pod_ready.go:93] pod "kube-apiserver-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:47.669971 2195229 pod_ready.go:82] duration metric: took 6.284905ms for pod "kube-apiserver-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:47.669982 2195229 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:47.670058 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-890146
	I0916 11:12:47.670082 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:47.670107 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:47.670111 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:47.672579 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:47.672602 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:47.672610 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:47 GMT
	I0916 11:12:47.672614 2195229 round_trippers.go:580]     Audit-Id: 0b71aaec-baa2-4bb7-b3fa-17ded838c8fc
	I0916 11:12:47.672618 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:47.672622 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:47.672625 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:47.672630 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:47.672821 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-controller-manager-multinode-890146","namespace":"kube-system","uid":"421b15a5-a8e2-4631-bf18-1592c8747b09","resourceVersion":"1067","creationTimestamp":"2024-09-16T11:07:33Z","labels":{"component":"kube-controller-manager","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.mirror":"727befc4343b7e938d1e1a5c10c43cc5","kubernetes.io/config.seen":"2024-09-16T11:07:32.783594137Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:33Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.
io/config.seen":{},"f:kubernetes.io/config.source":{}},"f:labels":{".": [truncated 8307 chars]
	I0916 11:12:47.673379 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:47.673396 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:47.673404 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:47.673410 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:47.675770 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:47.675794 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:47.675802 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:47.675806 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:47.675809 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:47 GMT
	I0916 11:12:47.675812 2195229 round_trippers.go:580]     Audit-Id: 16ffe55a-db1a-4430-93ca-687103a6d72b
	I0916 11:12:47.675815 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:47.675818 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:47.676054 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:47.676431 2195229 pod_ready.go:93] pod "kube-controller-manager-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:47.676450 2195229 pod_ready.go:82] duration metric: took 6.449088ms for pod "kube-controller-manager-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:47.676463 2195229 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:47.676530 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-59f9h
	I0916 11:12:47.676540 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:47.676549 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:47.676554 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:47.678714 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:47.678741 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:47.678750 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:47.678755 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:47 GMT
	I0916 11:12:47.678758 2195229 round_trippers.go:580]     Audit-Id: 86b39509-c23b-4138-814a-7d7aff3eb721
	I0916 11:12:47.678761 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:47.678769 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:47.678776 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:47.679258 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-59f9h","generateName":"kube-proxy-","namespace":"kube-system","uid":"a9a614fd-3de3-4fa0-b773-5d6a6054d0ea","resourceVersion":"1130","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:
requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6397 chars]
	I0916 11:12:47.679804 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m02
	I0916 11:12:47.679824 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:47.679832 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:47.679836 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:47.681842 2195229 round_trippers.go:574] Response Status: 200 OK in 1 milliseconds
	I0916 11:12:47.681874 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:47.681883 2195229 round_trippers.go:580]     Audit-Id: 60f1c761-e2b2-41cc-bbff-ef7982404c57
	I0916 11:12:47.681887 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:47.681890 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:47.681893 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:47.681897 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:47.681901 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:47 GMT
	I0916 11:12:47.682276 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146-m02","uid":"96a49101-9e05-4adf-91c1-fec4ee0a7351","resourceVersion":"1078","creationTimestamp":"2024-09-16T11:08:09Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146-m02","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"false","minikube.k8s.io/updated_at":"2024_09_16T11_08_09_0700","minikube.k8s.io/version":"v1.34.0"},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubeadm","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:09Z","fieldsType":"FieldsV1","fieldsV1":{"f:metad
ata":{"f:annotations":{"f:kubeadm.alpha.kubernetes.io/cri-socket":{}}}} [truncated 5313 chars]
	I0916 11:12:47.682732 2195229 pod_ready.go:93] pod "kube-proxy-59f9h" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:47.682750 2195229 pod_ready.go:82] duration metric: took 6.276585ms for pod "kube-proxy-59f9h" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:47.682781 2195229 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:47.852223 2195229 request.go:632] Waited for 169.352243ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:12:47.852290 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-fm5qr
	I0916 11:12:47.852297 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:47.852305 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:47.852311 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:47.855186 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:47.855211 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:47.855220 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:47 GMT
	I0916 11:12:47.855223 2195229 round_trippers.go:580]     Audit-Id: 51b53e2a-5f14-48ae-8b03-b6f4a27e9369
	I0916 11:12:47.855226 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:47.855229 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:47.855232 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:47.855236 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:47.855469 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-fm5qr","generateName":"kube-proxy-","namespace":"kube-system","uid":"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73","resourceVersion":"1052","creationTimestamp":"2024-09-16T11:07:37Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:37Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:
requiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k [truncated 6389 chars]
	I0916 11:12:48.052491 2195229 request.go:632] Waited for 196.396977ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:48.052564 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:48.052570 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:48.052584 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:48.052592 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:48.055277 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:48.055355 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:48.055389 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:48.055425 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:48.055447 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:48.055464 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:48 GMT
	I0916 11:12:48.055475 2195229 round_trippers.go:580]     Audit-Id: cfe43a16-1af4-4644-b686-95c818083abb
	I0916 11:12:48.055478 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:48.055647 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:48.056070 2195229 pod_ready.go:93] pod "kube-proxy-fm5qr" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:48.056090 2195229 pod_ready.go:82] duration metric: took 373.300769ms for pod "kube-proxy-fm5qr" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:48.056102 2195229 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-vl27g" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:48.251970 2195229 request.go:632] Waited for 195.796931ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:12:48.252068 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-vl27g
	I0916 11:12:48.252082 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:48.252091 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:48.252103 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:48.254765 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:48.254793 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:48.254803 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:48.254807 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:48.254812 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:48.254816 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:48 GMT
	I0916 11:12:48.254820 2195229 round_trippers.go:580]     Audit-Id: 9bce6684-a8fa-4e65-a95c-d5b053930a9d
	I0916 11:12:48.254822 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:48.255067 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-proxy-vl27g","generateName":"kube-proxy-","namespace":"kube-system","uid":"8f7e9a8c-6e70-4445-b85e-11c5c03701be","resourceVersion":"945","creationTimestamp":"2024-09-16T11:08:46Z","labels":{"controller-revision-hash":"648b489c5b","k8s-app":"kube-proxy","pod-template-generation":"1"},"ownerReferences":[{"apiVersion":"apps/v1","kind":"DaemonSet","name":"kube-proxy","uid":"b307cee5-dff5-4557-b171-e8f0ce713737","controller":true,"blockOwnerDeletion":true}],"managedFields":[{"manager":"kube-controller-manager","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:08:46Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:generateName":{},"f:labels":{".":{},"f:controller-revision-hash":{},"f:k8s-app":{},"f:pod-template-generation":{}},"f:ownerReferences":{".":{},"k:{\"uid\":\"b307cee5-dff5-4557-b171-e8f0ce713737\"}":{}}},"f:spec":{"f:affinity":{".":{},"f:nodeAffinity":{".":{},"f:r
equiredDuringSchedulingIgnoredDuringExecution":{}}},"f:containers":{"k: [truncated 6396 chars]
	I0916 11:12:48.451975 2195229 request.go:632] Waited for 196.379779ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:12:48.452077 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146-m03
	I0916 11:12:48.452093 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:48.452101 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:48.452107 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:48.454257 2195229 round_trippers.go:574] Response Status: 404 Not Found in 2 milliseconds
	I0916 11:12:48.454282 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:48.454290 2195229 round_trippers.go:580]     Content-Length: 210
	I0916 11:12:48.454294 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:48 GMT
	I0916 11:12:48.454298 2195229 round_trippers.go:580]     Audit-Id: 91267747-39dc-42fc-9c2e-670d0765badb
	I0916 11:12:48.454324 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:48.454337 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:48.454340 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:48.454343 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:48.454364 2195229 request.go:1351] Response Body: {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"nodes \"multinode-890146-m03\" not found","reason":"NotFound","details":{"name":"multinode-890146-m03","kind":"nodes"},"code":404}
	I0916 11:12:48.454482 2195229 pod_ready.go:98] node "multinode-890146-m03" hosting pod "kube-proxy-vl27g" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "multinode-890146-m03": nodes "multinode-890146-m03" not found
	I0916 11:12:48.454498 2195229 pod_ready.go:82] duration metric: took 398.388738ms for pod "kube-proxy-vl27g" in "kube-system" namespace to be "Ready" ...
	E0916 11:12:48.454510 2195229 pod_ready.go:67] WaitExtra: waitPodCondition: node "multinode-890146-m03" hosting pod "kube-proxy-vl27g" in "kube-system" namespace is currently not "Ready" (skipping!): error getting node "multinode-890146-m03": nodes "multinode-890146-m03" not found
	I0916 11:12:48.454521 2195229 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:48.651834 2195229 request.go:632] Waited for 197.236652ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:48.651900 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-890146
	I0916 11:12:48.651911 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:48.651921 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:48.651926 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:48.654326 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:48.654350 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:48.654359 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:48.654365 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:48 GMT
	I0916 11:12:48.654368 2195229 round_trippers.go:580]     Audit-Id: 9c60ee1f-a013-4897-8b3a-2991d82ea82f
	I0916 11:12:48.654371 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:48.654373 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:48.654376 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:48.654871 2195229 request.go:1351] Response Body: {"kind":"Pod","apiVersion":"v1","metadata":{"name":"kube-scheduler-multinode-890146","namespace":"kube-system","uid":"6d440ef3-2e6e-4db9-8c28-2417f7a80c9f","resourceVersion":"1068","creationTimestamp":"2024-09-16T11:07:31Z","labels":{"component":"kube-scheduler","tier":"control-plane"},"annotations":{"kubernetes.io/config.hash":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.mirror":"cde6393a930362ca2ed095ff881a067c","kubernetes.io/config.seen":"2024-09-16T11:07:25.437074996Z","kubernetes.io/config.source":"file"},"ownerReferences":[{"apiVersion":"v1","kind":"Node","name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","controller":true}],"managedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1","time":"2024-09-16T11:07:31Z","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:annotations":{".":{},"f:kubernetes.io/config.hash":{},"f:kubernetes.io/config.mirror":{},"f:kubernetes.io/config.seen":{}
,"f:kubernetes.io/config.source":{}},"f:labels":{".":{},"f:component":{ [truncated 5189 chars]
	I0916 11:12:48.851620 2195229 request.go:632] Waited for 196.24543ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:48.851687 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes/multinode-890146
	I0916 11:12:48.851693 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:48.851702 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:48.851713 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:48.854006 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:48.854080 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:48.854098 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:48.854105 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:48 GMT
	I0916 11:12:48.854109 2195229 round_trippers.go:580]     Audit-Id: 2d61395d-72fc-49e2-96e3-d0ef54ae74fb
	I0916 11:12:48.854112 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:48.854115 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:48.854117 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:48.854204 2195229 request.go:1351] Response Body: {"kind":"Node","apiVersion":"v1","metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"managedFields":[{"manager":"kubelet","operation":"Update
","apiVersion":"v1","time":"2024-09-16T11:07:29Z","fieldsType":"FieldsV [truncated 5317 chars]
	I0916 11:12:48.854612 2195229 pod_ready.go:93] pod "kube-scheduler-multinode-890146" in "kube-system" namespace has status "Ready":"True"
	I0916 11:12:48.854629 2195229 pod_ready.go:82] duration metric: took 400.099768ms for pod "kube-scheduler-multinode-890146" in "kube-system" namespace to be "Ready" ...
	I0916 11:12:48.854641 2195229 pod_ready.go:39] duration metric: took 22.212929278s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0916 11:12:48.854669 2195229 system_svc.go:44] waiting for kubelet service to be running ....
	I0916 11:12:48.854758 2195229 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:12:48.867230 2195229 system_svc.go:56] duration metric: took 12.552383ms WaitForService to wait for kubelet
	I0916 11:12:48.867262 2195229 kubeadm.go:582] duration metric: took 22.338598095s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:12:48.867281 2195229 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:12:49.051534 2195229 request.go:632] Waited for 184.161131ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.58.2:8443/api/v1/nodes
	I0916 11:12:49.051621 2195229 round_trippers.go:463] GET https://192.168.58.2:8443/api/v1/nodes
	I0916 11:12:49.051634 2195229 round_trippers.go:469] Request Headers:
	I0916 11:12:49.051644 2195229 round_trippers.go:473]     Accept: application/json, */*
	I0916 11:12:49.051649 2195229 round_trippers.go:473]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0916 11:12:49.054277 2195229 round_trippers.go:574] Response Status: 200 OK in 2 milliseconds
	I0916 11:12:49.054299 2195229 round_trippers.go:577] Response Headers:
	I0916 11:12:49.054307 2195229 round_trippers.go:580]     Content-Type: application/json
	I0916 11:12:49.054312 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Flowschema-Uid: 7d8ef24c-8a8c-4faf-a516-a156e6b03d24
	I0916 11:12:49.054317 2195229 round_trippers.go:580]     X-Kubernetes-Pf-Prioritylevel-Uid: 0c029a97-049d-47de-808f-a79f5755fd6a
	I0916 11:12:49.054321 2195229 round_trippers.go:580]     Date: Mon, 16 Sep 2024 11:12:49 GMT
	I0916 11:12:49.054325 2195229 round_trippers.go:580]     Audit-Id: 80fd777b-9d82-4db0-be74-eeb39af7cd42
	I0916 11:12:49.054330 2195229 round_trippers.go:580]     Cache-Control: no-cache, private
	I0916 11:12:49.054482 2195229 request.go:1351] Response Body: {"kind":"NodeList","apiVersion":"v1","metadata":{"resourceVersion":"1145"},"items":[{"metadata":{"name":"multinode-890146","uid":"9d093e17-c8ce-4956-b81e-3218a4abad45","resourceVersion":"977","creationTimestamp":"2024-09-16T11:07:30Z","labels":{"beta.kubernetes.io/arch":"arm64","beta.kubernetes.io/os":"linux","kubernetes.io/arch":"arm64","kubernetes.io/hostname":"multinode-890146","kubernetes.io/os":"linux","minikube.k8s.io/commit":"90d544f06ea0f69499271b003be64a9a224d57ed","minikube.k8s.io/name":"multinode-890146","minikube.k8s.io/primary":"true","minikube.k8s.io/updated_at":"2024_09_16T11_07_33_0700","minikube.k8s.io/version":"v1.34.0","node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""},"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///run/containerd/containerd.sock","node.alpha.kubernetes.io/ttl":"0","volumes.kubernetes.io/controller-managed-attach-detach":"true"},"mana
gedFields":[{"manager":"kubelet","operation":"Update","apiVersion":"v1" [truncated 11676 chars]
	I0916 11:12:49.055160 2195229 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:12:49.055184 2195229 node_conditions.go:123] node cpu capacity is 2
	I0916 11:12:49.055196 2195229 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:12:49.055201 2195229 node_conditions.go:123] node cpu capacity is 2
	I0916 11:12:49.055206 2195229 node_conditions.go:105] duration metric: took 187.919608ms to run NodePressure ...
	I0916 11:12:49.055226 2195229 start.go:241] waiting for startup goroutines ...
	I0916 11:12:49.055255 2195229 start.go:255] writing updated cluster config ...
	I0916 11:12:49.055579 2195229 ssh_runner.go:195] Run: rm -f paused
	I0916 11:12:49.063054 2195229 out.go:177] * Done! kubectl is now configured to use "multinode-890146" cluster and "default" namespace by default
	E0916 11:12:49.064921 2195229 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	7a47d602557ee       2f6c962e7b831       35 seconds ago      Running             coredns                   2                   9a42783e14183       coredns-7c65d6cfc9-vp22b
	497b048832648       89a35e2ebb6b9       36 seconds ago      Running             busybox                   2                   9648f0dfd2d86       busybox-7dff88458-hf6zl
	74230b72ab3b1       6a23fa8fd2b78       36 seconds ago      Running             kindnet-cni               2                   a7bd1b990f723       kindnet-dbrhk
	2094a8ccce98b       24a140c548c07       36 seconds ago      Running             kube-proxy                2                   d6873d89c67b2       kube-proxy-fm5qr
	876c8caf439c7       ba04bb24b9575       36 seconds ago      Exited              storage-provisioner       3                   cf2e75aefb92d       storage-provisioner
	e8960aff27f5c       27e3830e14027       41 seconds ago      Running             etcd                      2                   d0aa160816bf9       etcd-multinode-890146
	816ffe4e3d0f3       7f8aa378bb47d       41 seconds ago      Running             kube-scheduler            2                   52fc6af79860e       kube-scheduler-multinode-890146
	d50f95689f753       279f381cb3736       41 seconds ago      Running             kube-controller-manager   2                   bbde0562fe760       kube-controller-manager-multinode-890146
	e39af36f1de8e       d3f53a98c0a9d       41 seconds ago      Running             kube-apiserver            2                   88407d2dfdb8b       kube-apiserver-multinode-890146
	d3fb5dfba8901       89a35e2ebb6b9       2 minutes ago       Exited              busybox                   1                   f22f2b114c085       busybox-7dff88458-hf6zl
	0d3abd904fe54       6a23fa8fd2b78       2 minutes ago       Exited              kindnet-cni               1                   eb8cce93a0817       kindnet-dbrhk
	92299bff0d256       2f6c962e7b831       2 minutes ago       Exited              coredns                   1                   912884a0aab78       coredns-7c65d6cfc9-vp22b
	654ecbfed03d8       24a140c548c07       2 minutes ago       Exited              kube-proxy                1                   54bba39675f18       kube-proxy-fm5qr
	07455bc60716a       279f381cb3736       2 minutes ago       Exited              kube-controller-manager   1                   f5d20f97662ab       kube-controller-manager-multinode-890146
	2cc43e414446d       7f8aa378bb47d       2 minutes ago       Exited              kube-scheduler            1                   66f010f223721       kube-scheduler-multinode-890146
	5973d4702c823       d3f53a98c0a9d       2 minutes ago       Exited              kube-apiserver            1                   2303b4bd47452       kube-apiserver-multinode-890146
	9a6e3be38656a       27e3830e14027       2 minutes ago       Exited              etcd                      1                   87f6e771811d3       etcd-multinode-890146
	
	
	==> containerd <==
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.674553120Z" level=info msg="CreateContainer within sandbox \"cf2e75aefb92d575c09ddb5644d7340800ec7e2b8c78b4a00cf490b32b7eda5a\" for &ContainerMetadata{Name:storage-provisioner,Attempt:3,} returns container id \"876c8caf439c71eadceea6d2ac2108269374ff30dc83ed807fdbf24a0fac9129\""
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.679380671Z" level=info msg="StartContainer for \"876c8caf439c71eadceea6d2ac2108269374ff30dc83ed807fdbf24a0fac9129\""
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.694187827Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:busybox-7dff88458-hf6zl,Uid:8e7abaaa-be47-456f-9980-53cbfcd75f48,Namespace:default,Attempt:2,} returns sandbox id \"9648f0dfd2d869fc91aa9b815daa4bb1dbd3763b3c30bf34cb2a9f6209fc7992\""
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.701341626Z" level=info msg="CreateContainer within sandbox \"a7bd1b990f72340fd4284ff660212be90ee5a33ada9379b0cebb81338b69adea\" for container &ContainerMetadata{Name:kindnet-cni,Attempt:2,}"
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.709360651Z" level=info msg="CreateContainer within sandbox \"9648f0dfd2d869fc91aa9b815daa4bb1dbd3763b3c30bf34cb2a9f6209fc7992\" for container &ContainerMetadata{Name:busybox,Attempt:2,}"
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.719050969Z" level=info msg="CreateContainer within sandbox \"d6873d89c67b2097c51e53578c3842579ed6abab2557293d644f1722e3973747\" for &ContainerMetadata{Name:kube-proxy,Attempt:2,} returns container id \"2094a8ccce98b1c9cc88aa41c125d07099a341dd911a10885147d915e9f48a3d\""
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.725631217Z" level=info msg="StartContainer for \"2094a8ccce98b1c9cc88aa41c125d07099a341dd911a10885147d915e9f48a3d\""
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.749031163Z" level=info msg="CreateContainer within sandbox \"a7bd1b990f72340fd4284ff660212be90ee5a33ada9379b0cebb81338b69adea\" for &ContainerMetadata{Name:kindnet-cni,Attempt:2,} returns container id \"74230b72ab3b13d73d6062b161eaff5ef431b5e5869d2532a3a807465df95acf\""
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.752617925Z" level=info msg="StartContainer for \"74230b72ab3b13d73d6062b161eaff5ef431b5e5869d2532a3a807465df95acf\""
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.757445262Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-7c65d6cfc9-vp22b,Uid:a6adb735-448b-480b-aba1-3ce4d56c6fc7,Namespace:kube-system,Attempt:2,} returns sandbox id \"9a42783e14183d2fc21e2ec2a807c4dfaed583fa3a93d2b7fc92fee7dfa382a9\""
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.757880681Z" level=info msg="CreateContainer within sandbox \"9648f0dfd2d869fc91aa9b815daa4bb1dbd3763b3c30bf34cb2a9f6209fc7992\" for &ContainerMetadata{Name:busybox,Attempt:2,} returns container id \"497b048832648177b3118949e436365559bdbe997c415c4ba7cec608d757e610\""
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.760913247Z" level=info msg="StartContainer for \"497b048832648177b3118949e436365559bdbe997c415c4ba7cec608d757e610\""
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.770137501Z" level=info msg="CreateContainer within sandbox \"9a42783e14183d2fc21e2ec2a807c4dfaed583fa3a93d2b7fc92fee7dfa382a9\" for container &ContainerMetadata{Name:coredns,Attempt:2,}"
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.795823170Z" level=info msg="CreateContainer within sandbox \"9a42783e14183d2fc21e2ec2a807c4dfaed583fa3a93d2b7fc92fee7dfa382a9\" for &ContainerMetadata{Name:coredns,Attempt:2,} returns container id \"7a47d602557eec956c007e2d7757a72e355b355c0d0a3f1b906c8ceb986b0027\""
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.799106344Z" level=info msg="StartContainer for \"7a47d602557eec956c007e2d7757a72e355b355c0d0a3f1b906c8ceb986b0027\""
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.805569233Z" level=info msg="StartContainer for \"876c8caf439c71eadceea6d2ac2108269374ff30dc83ed807fdbf24a0fac9129\" returns successfully"
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.920692289Z" level=info msg="StartContainer for \"497b048832648177b3118949e436365559bdbe997c415c4ba7cec608d757e610\" returns successfully"
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.930497601Z" level=info msg="StartContainer for \"7a47d602557eec956c007e2d7757a72e355b355c0d0a3f1b906c8ceb986b0027\" returns successfully"
	Sep 16 11:12:14 multinode-890146 containerd[570]: time="2024-09-16T11:12:14.935734274Z" level=info msg="StartContainer for \"2094a8ccce98b1c9cc88aa41c125d07099a341dd911a10885147d915e9f48a3d\" returns successfully"
	Sep 16 11:12:15 multinode-890146 containerd[570]: time="2024-09-16T11:12:15.052890828Z" level=info msg="StartContainer for \"74230b72ab3b13d73d6062b161eaff5ef431b5e5869d2532a3a807465df95acf\" returns successfully"
	Sep 16 11:12:44 multinode-890146 containerd[570]: time="2024-09-16T11:12:44.865422728Z" level=info msg="shim disconnected" id=876c8caf439c71eadceea6d2ac2108269374ff30dc83ed807fdbf24a0fac9129 namespace=k8s.io
	Sep 16 11:12:44 multinode-890146 containerd[570]: time="2024-09-16T11:12:44.866242268Z" level=warning msg="cleaning up after shim disconnected" id=876c8caf439c71eadceea6d2ac2108269374ff30dc83ed807fdbf24a0fac9129 namespace=k8s.io
	Sep 16 11:12:44 multinode-890146 containerd[570]: time="2024-09-16T11:12:44.866277230Z" level=info msg="cleaning up dead shim" namespace=k8s.io
	Sep 16 11:12:45 multinode-890146 containerd[570]: time="2024-09-16T11:12:45.305547796Z" level=info msg="RemoveContainer for \"07054c18240bddcff9366f96beae270b49083c8c0a3d760859239bbf844ea0cd\""
	Sep 16 11:12:45 multinode-890146 containerd[570]: time="2024-09-16T11:12:45.316092845Z" level=info msg="RemoveContainer for \"07054c18240bddcff9366f96beae270b49083c8c0a3d760859239bbf844ea0cd\" returns successfully"
	
	
	==> coredns [7a47d602557eec956c007e2d7757a72e355b355c0d0a3f1b906c8ceb986b0027] <==
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[INFO] plugin/kubernetes: waiting for Kubernetes API before starting server
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 75e5db48a73272e2c90919c8256e5cca0293ae0ed689e2ed44f1254a9589c3d004cb3e693d059116718c47e9305987b828b11b2735a1cefa59e4a9489dda5cee
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:54064 - 32724 "HINFO IN 3899121271395949486.8127728670961620316. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.047538522s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[147785201]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:12:14.950) (total time: 30000ms):
	Trace[147785201]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (11:12:44.951)
	Trace[147785201]: [30.000721065s] [30.000721065s] END
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[418133117]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:12:14.947) (total time: 30004ms):
	Trace[418133117]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30004ms (11:12:44.951)
	Trace[418133117]: [30.004505824s] [30.004505824s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[973249041]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:12:14.951) (total time: 30000ms):
	Trace[973249041]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30000ms (11:12:44.951)
	Trace[973249041]: [30.000600089s] [30.000600089s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	
	
	==> coredns [92299bff0d2567667e4546c0c60418026422290d336e9613b5aed5df8af84cc5] <==
	[WARNING] plugin/kubernetes: starting server with unsynced Kubernetes API
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 75e5db48a73272e2c90919c8256e5cca0293ae0ed689e2ed44f1254a9589c3d004cb3e693d059116718c47e9305987b828b11b2735a1cefa59e4a9489dda5cee
	CoreDNS-1.11.3
	linux/arm64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:50907 - 6321 "HINFO IN 4361026237914167834.6224583088720257608. udp 57 false 512" NXDOMAIN qr,rd,ra 57 0.0149415s
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/ready: Still waiting on: "kubernetes"
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[950690769]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:09:56.963) (total time: 30001ms):
	Trace[950690769]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (11:10:26.964)
	Trace[950690769]: [30.001619643s] [30.001619643s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Service: failed to list *v1.Service: Get "https://10.96.0.1:443/api/v1/services?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[1310660323]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:09:56.963) (total time: 30001ms):
	Trace[1310660323]: ---"Objects listed" error:Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30001ms (11:10:26.965)
	Trace[1310660323]: [30.001340104s] [30.001340104s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.EndpointSlice: failed to list *v1.EndpointSlice: Get "https://10.96.0.1:443/apis/discovery.k8s.io/v1/endpointslices?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	[INFO] plugin/kubernetes: Trace[2008288180]: "Reflector ListAndWatch" name:pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229 (16-Sep-2024 11:09:56.964) (total time: 30002ms):
	Trace[2008288180]: ---"Objects listed" error:Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout 30002ms (11:10:26.966)
	Trace[2008288180]: [30.002837778s] [30.002837778s] END
	[ERROR] plugin/kubernetes: pkg/mod/k8s.io/client-go@v0.29.3/tools/cache/reflector.go:229: Failed to watch *v1.Namespace: failed to list *v1.Namespace: Get "https://10.96.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0": dial tcp 10.96.0.1:443: i/o timeout
	
	
	==> describe nodes <==
	Name:               multinode-890146
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-890146
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-890146
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T11_07_33_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:07:30 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-890146
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:12:43 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:12:13 +0000   Mon, 16 Sep 2024 11:07:27 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:12:13 +0000   Mon, 16 Sep 2024 11:07:27 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:12:13 +0000   Mon, 16 Sep 2024 11:07:27 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:12:13 +0000   Mon, 16 Sep 2024 11:07:30 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.58.2
	  Hostname:    multinode-890146
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 75ffee0e731045019a1b335f039d5bb3
	  System UUID:                2cb24a37-7b71-4957-b8fd-d0da5c3f8b7a
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-hf6zl                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m37s
	  kube-system                 coredns-7c65d6cfc9-vp22b                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     5m14s
	  kube-system                 etcd-multinode-890146                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         5m18s
	  kube-system                 kindnet-dbrhk                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      5m14s
	  kube-system                 kube-apiserver-multinode-890146             250m (12%)    0 (0%)      0 (0%)           0 (0%)         5m18s
	  kube-system                 kube-controller-manager-multinode-890146    200m (10%)    0 (0%)      0 (0%)           0 (0%)         5m18s
	  kube-system                 kube-proxy-fm5qr                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m14s
	  kube-system                 kube-scheduler-multinode-890146             100m (5%)     0 (0%)      0 (0%)           0 (0%)         5m20s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m13s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 5m12s                  kube-proxy       
	  Normal   Starting                 35s                    kube-proxy       
	  Normal   Starting                 2m53s                  kube-proxy       
	  Normal   Starting                 5m26s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 5m26s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  5m26s (x8 over 5m26s)  kubelet          Node multinode-890146 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    5m26s (x7 over 5m26s)  kubelet          Node multinode-890146 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     5m26s (x7 over 5m26s)  kubelet          Node multinode-890146 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  5m26s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   Starting                 5m19s                  kubelet          Starting kubelet.
	  Warning  CgroupV1                 5m19s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  5m19s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientPID     5m18s                  kubelet          Node multinode-890146 status is now: NodeHasSufficientPID
	  Normal   NodeHasNoDiskPressure    5m18s                  kubelet          Node multinode-890146 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientMemory  5m18s                  kubelet          Node multinode-890146 status is now: NodeHasSufficientMemory
	  Normal   RegisteredNode           5m15s                  node-controller  Node multinode-890146 event: Registered Node multinode-890146 in Controller
	  Normal   NodeAllocatableEnforced  3m1s                   kubelet          Updated Node Allocatable limit across pods
	  Warning  CgroupV1                 3m1s                   kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  3m1s (x8 over 3m1s)    kubelet          Node multinode-890146 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    3m1s (x7 over 3m1s)    kubelet          Node multinode-890146 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     3m1s (x7 over 3m1s)    kubelet          Node multinode-890146 status is now: NodeHasSufficientPID
	  Normal   Starting                 3m1s                   kubelet          Starting kubelet.
	  Normal   RegisteredNode           2m53s                  node-controller  Node multinode-890146 event: Registered Node multinode-890146 in Controller
	  Normal   Starting                 43s                    kubelet          Starting kubelet.
	  Warning  CgroupV1                 43s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  43s (x8 over 43s)      kubelet          Node multinode-890146 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    43s (x7 over 43s)      kubelet          Node multinode-890146 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     43s (x7 over 43s)      kubelet          Node multinode-890146 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  43s                    kubelet          Updated Node Allocatable limit across pods
	  Normal   RegisteredNode           35s                    node-controller  Node multinode-890146 event: Registered Node multinode-890146 in Controller
	
	
	Name:               multinode-890146-m02
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-890146-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=multinode-890146
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2024_09_16T11_08_09_0700
	                    minikube.k8s.io/version=v1.34.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:08:09 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-890146-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:12:48 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:12:28 +0000   Mon, 16 Sep 2024 11:10:53 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:12:28 +0000   Mon, 16 Sep 2024 11:10:53 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:12:28 +0000   Mon, 16 Sep 2024 11:10:53 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:12:28 +0000   Mon, 16 Sep 2024 11:10:53 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.58.3
	  Hostname:    multinode-890146-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 ac9032b71ff04cd28f64284bc6078711
	  System UUID:                afe70f4d-0cb5-4f79-97b8-28a81db2fa30
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                       CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                       ------------  ----------  ---------------  -------------  ---
	  default                     busybox-7dff88458-wrnfh    0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m37s
	  kube-system                 kindnet-4sjj6              100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      4m42s
	  kube-system                 kube-proxy-59f9h           0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m42s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type     Reason                   Age                    From             Message
	  ----     ------                   ----                   ----             -------
	  Normal   Starting                 17s                    kube-proxy       
	  Normal   Starting                 4m39s                  kube-proxy       
	  Normal   Starting                 106s                   kube-proxy       
	  Warning  CgroupV1                 4m42s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeHasSufficientMemory  4m42s (x2 over 4m42s)  kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    4m42s (x2 over 4m42s)  kubelet          Node multinode-890146-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     4m42s (x2 over 4m42s)  kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  4m42s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeReady                4m41s                  kubelet          Node multinode-890146-m02 status is now: NodeReady
	  Normal   RegisteredNode           4m40s                  node-controller  Node multinode-890146-m02 event: Registered Node multinode-890146-m02 in Controller
	  Normal   RegisteredNode           2m53s                  node-controller  Node multinode-890146-m02 event: Registered Node multinode-890146-m02 in Controller
	  Normal   NodeNotReady             2m13s                  node-controller  Node multinode-890146-m02 status is now: NodeNotReady
	  Warning  CgroupV1                 2m11s                  kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  2m11s                  kubelet          Updated Node Allocatable limit across pods
	  Normal   Starting                 2m11s                  kubelet          Starting kubelet.
	  Normal   NodeHasNoDiskPressure    2m4s (x7 over 2m11s)   kubelet          Node multinode-890146-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     2m4s (x7 over 2m11s)   kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientPID
	  Normal   NodeHasSufficientMemory  118s (x8 over 2m11s)   kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientMemory
	  Normal   RegisteredNode           35s                    node-controller  Node multinode-890146-m02 event: Registered Node multinode-890146-m02 in Controller
	  Normal   Starting                 30s                    kubelet          Starting kubelet.
	  Warning  CgroupV1                 30s                    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  30s                    kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  23s (x7 over 30s)      kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    23s (x7 over 30s)      kubelet          Node multinode-890146-m02 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     23s (x7 over 30s)      kubelet          Node multinode-890146-m02 status is now: NodeHasSufficientPID
	
	
	==> dmesg <==
	
	
	==> etcd [9a6e3be38656a34c99c98c1d83ac245ced91c2c4e06160058130d7bdf77a6cb2] <==
	{"level":"info","ts":"2024-09-16T11:09:51.773024Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"3a56e4ca95e2355c","local-member-id":"b2c6679ac05f2cf1","added-peer-id":"b2c6679ac05f2cf1","added-peer-peer-urls":["https://192.168.58.2:2380"]}
	{"level":"info","ts":"2024-09-16T11:09:51.773119Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"3a56e4ca95e2355c","local-member-id":"b2c6679ac05f2cf1","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:09:51.773157Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:09:51.779849Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:09:51.782047Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T11:09:51.782312Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"b2c6679ac05f2cf1","initial-advertise-peer-urls":["https://192.168.58.2:2380"],"listen-peer-urls":["https://192.168.58.2:2380"],"advertise-client-urls":["https://192.168.58.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.58.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T11:09:51.782345Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T11:09:51.782599Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.58.2:2380"}
	{"level":"info","ts":"2024-09-16T11:09:51.782610Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.58.2:2380"}
	{"level":"info","ts":"2024-09-16T11:09:53.150722Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 is starting a new election at term 2"}
	{"level":"info","ts":"2024-09-16T11:09:53.150971Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became pre-candidate at term 2"}
	{"level":"info","ts":"2024-09-16T11:09:53.151084Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 received MsgPreVoteResp from b2c6679ac05f2cf1 at term 2"}
	{"level":"info","ts":"2024-09-16T11:09:53.151186Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became candidate at term 3"}
	{"level":"info","ts":"2024-09-16T11:09:53.151270Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 received MsgVoteResp from b2c6679ac05f2cf1 at term 3"}
	{"level":"info","ts":"2024-09-16T11:09:53.151366Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became leader at term 3"}
	{"level":"info","ts":"2024-09-16T11:09:53.151450Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: b2c6679ac05f2cf1 elected leader b2c6679ac05f2cf1 at term 3"}
	{"level":"info","ts":"2024-09-16T11:09:53.154994Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:09:53.156096Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:09:53.157207Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T11:09:53.154957Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"b2c6679ac05f2cf1","local-member-attributes":"{Name:multinode-890146 ClientURLs:[https://192.168.58.2:2379]}","request-path":"/0/members/b2c6679ac05f2cf1/attributes","cluster-id":"3a56e4ca95e2355c","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:09:53.165113Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:09:53.165419Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:09:53.165533Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T11:09:53.166421Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:09:53.167510Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.58.2:2379"}
	
	
	==> etcd [e8960aff27f5cc155b12a5ab8aed7fcb26ac1651f11e020a875a956e41de5cb9] <==
	{"level":"info","ts":"2024-09-16T11:12:09.288328Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"3a56e4ca95e2355c","local-member-id":"b2c6679ac05f2cf1","added-peer-id":"b2c6679ac05f2cf1","added-peer-peer-urls":["https://192.168.58.2:2380"]}
	{"level":"info","ts":"2024-09-16T11:12:09.288550Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"3a56e4ca95e2355c","local-member-id":"b2c6679ac05f2cf1","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:12:09.288723Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:12:09.286800Z","caller":"etcdserver/server.go:767","msg":"starting initial election tick advance","election-ticks":10}
	{"level":"info","ts":"2024-09-16T11:12:09.315519Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T11:12:09.315873Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.58.2:2380"}
	{"level":"info","ts":"2024-09-16T11:12:09.316002Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.58.2:2380"}
	{"level":"info","ts":"2024-09-16T11:12:09.316426Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"b2c6679ac05f2cf1","initial-advertise-peer-urls":["https://192.168.58.2:2380"],"listen-peer-urls":["https://192.168.58.2:2380"],"advertise-client-urls":["https://192.168.58.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.58.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T11:12:09.316674Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T11:12:10.830759Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 is starting a new election at term 3"}
	{"level":"info","ts":"2024-09-16T11:12:10.830816Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became pre-candidate at term 3"}
	{"level":"info","ts":"2024-09-16T11:12:10.830851Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 received MsgPreVoteResp from b2c6679ac05f2cf1 at term 3"}
	{"level":"info","ts":"2024-09-16T11:12:10.831080Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became candidate at term 4"}
	{"level":"info","ts":"2024-09-16T11:12:10.831199Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 received MsgVoteResp from b2c6679ac05f2cf1 at term 4"}
	{"level":"info","ts":"2024-09-16T11:12:10.831306Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"b2c6679ac05f2cf1 became leader at term 4"}
	{"level":"info","ts":"2024-09-16T11:12:10.831402Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: b2c6679ac05f2cf1 elected leader b2c6679ac05f2cf1 at term 4"}
	{"level":"info","ts":"2024-09-16T11:12:10.834904Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"b2c6679ac05f2cf1","local-member-attributes":"{Name:multinode-890146 ClientURLs:[https://192.168.58.2:2379]}","request-path":"/0/members/b2c6679ac05f2cf1/attributes","cluster-id":"3a56e4ca95e2355c","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:12:10.835099Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:12:10.835140Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:12:10.839493Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:12:10.840621Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T11:12:10.843492Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:12:10.846443Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:12:10.846537Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T11:12:10.847512Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.58.2:2379"}
	
	
	==> kernel <==
	 11:12:51 up 1 day, 14:55,  0 users,  load average: 2.16, 2.08, 2.10
	Linux multinode-890146 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kindnet [0d3abd904fe54a1978d08d9978009d74bddb24839e5a5ca370830593e207f392] <==
	I0916 11:11:07.319711       1 main.go:299] handling current node
	I0916 11:11:07.319727       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:11:07.319733       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:11:07.320085       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:11:07.320104       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	I0916 11:11:17.327031       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:11:17.327071       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	I0916 11:11:17.327272       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:11:17.327288       1 main.go:299] handling current node
	I0916 11:11:17.327307       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:11:17.327398       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:11:27.321666       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:11:27.321706       1 main.go:299] handling current node
	I0916 11:11:27.321722       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:11:27.321729       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:11:27.321829       1 main.go:295] Handling node with IPs: map[192.168.58.4:{}]
	I0916 11:11:27.321835       1 main.go:322] Node multinode-890146-m03 has CIDR [10.244.2.0/24] 
	I0916 11:11:37.326769       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:11:37.326805       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:11:37.326915       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:11:37.326929       1 main.go:299] handling current node
	I0916 11:11:47.326740       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:11:47.326780       1 main.go:299] handling current node
	I0916 11:11:47.326796       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:11:47.326802       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	
	
	==> kindnet [74230b72ab3b13d73d6062b161eaff5ef431b5e5869d2532a3a807465df95acf] <==
	I0916 11:12:15.043114       1 main.go:109] connected to apiserver: https://10.96.0.1:443
	I0916 11:12:15.043464       1 main.go:139] hostIP = 192.168.58.2
	podIP = 192.168.58.2
	I0916 11:12:15.122513       1 main.go:148] setting mtu 1500 for CNI 
	I0916 11:12:15.130484       1 main.go:178] kindnetd IP family: "ipv4"
	I0916 11:12:15.130816       1 main.go:182] noMask IPv4 subnets: [10.244.0.0/16]
	I0916 11:12:15.521410       1 controller.go:334] Starting controller kube-network-policies
	I0916 11:12:15.522087       1 controller.go:338] Waiting for informer caches to sync
	I0916 11:12:15.522225       1 shared_informer.go:313] Waiting for caches to sync for kube-network-policies
	I0916 11:12:15.622962       1 shared_informer.go:320] Caches are synced for kube-network-policies
	I0916 11:12:15.623561       1 metrics.go:61] Registering metrics
	I0916 11:12:15.623766       1 controller.go:374] Syncing nftables rules
	I0916 11:12:25.522869       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:12:25.522946       1 main.go:299] handling current node
	I0916 11:12:25.525389       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:12:25.525436       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:12:25.525595       1 routes.go:62] Adding route {Ifindex: 0 Dst: 10.244.1.0/24 Src: <nil> Gw: 192.168.58.3 Flags: [] Table: 0} 
	I0916 11:12:35.519518       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:12:35.519557       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	I0916 11:12:35.519739       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:12:35.519815       1 main.go:299] handling current node
	I0916 11:12:45.520093       1 main.go:295] Handling node with IPs: map[192.168.58.2:{}]
	I0916 11:12:45.520159       1 main.go:299] handling current node
	I0916 11:12:45.520177       1 main.go:295] Handling node with IPs: map[192.168.58.3:{}]
	I0916 11:12:45.520184       1 main.go:322] Node multinode-890146-m02 has CIDR [10.244.1.0/24] 
	
	
	==> kube-apiserver [5973d4702c82301758aca3fa2a6a770d5ce1c6ff9abd4830207a977a63162fdc] <==
	I0916 11:09:55.487857       1 establishing_controller.go:81] Starting EstablishingController
	I0916 11:09:55.487882       1 nonstructuralschema_controller.go:195] Starting NonStructuralSchemaConditionController
	I0916 11:09:55.487897       1 apiapproval_controller.go:189] Starting KubernetesAPIApprovalPolicyConformantConditionController
	I0916 11:09:55.487908       1 crd_finalizer.go:269] Starting CRDFinalizer
	I0916 11:09:55.633515       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 11:09:55.634246       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 11:09:55.634429       1 aggregator.go:171] initial CRD sync complete...
	I0916 11:09:55.634810       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 11:09:55.634969       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 11:09:55.635042       1 cache.go:39] Caches are synced for autoregister controller
	I0916 11:09:55.646727       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 11:09:55.646836       1 policy_source.go:224] refreshing policies
	I0916 11:09:55.646962       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 11:09:55.713231       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 11:09:55.716777       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 11:09:55.717073       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 11:09:55.717359       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 11:09:55.717375       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 11:09:55.718065       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 11:09:55.720363       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	E0916 11:09:55.731730       1 controller.go:97] Error removing old endpoints from kubernetes service: no API server IP addresses were listed in storage, refusing to erase all endpoints for the kubernetes Service
	I0916 11:09:55.734411       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:09:56.420819       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:09:59.090863       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:09:59.333962       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	
	
	==> kube-apiserver [e39af36f1de8e490510ffb9d917357ecd01fd05e7f573030452365877080e16d] <==
	I0916 11:12:12.939090       1 apiservice_controller.go:100] Starting APIServiceRegistrationController
	I0916 11:12:12.939104       1 cache.go:32] Waiting for caches to sync for APIServiceRegistrationController controller
	I0916 11:12:13.004698       1 dynamic_cafile_content.go:160] "Starting controller" name="client-ca-bundle::/var/lib/minikube/certs/ca.crt"
	I0916 11:12:13.010861       1 dynamic_cafile_content.go:160] "Starting controller" name="request-header::/var/lib/minikube/certs/front-proxy-ca.crt"
	I0916 11:12:13.139003       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 11:12:13.151395       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0916 11:12:13.189147       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 11:12:13.189341       1 policy_source.go:224] refreshing policies
	I0916 11:12:13.222066       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:12:13.229963       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 11:12:13.230137       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 11:12:13.230085       1 shared_informer.go:320] Caches are synced for configmaps
	I0916 11:12:13.232168       1 cache.go:39] Caches are synced for RemoteAvailability controller
	I0916 11:12:13.232364       1 shared_informer.go:320] Caches are synced for crd-autoregister
	I0916 11:12:13.232910       1 aggregator.go:171] initial CRD sync complete...
	I0916 11:12:13.232984       1 autoregister_controller.go:144] Starting autoregister controller
	I0916 11:12:13.233018       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0916 11:12:13.233066       1 cache.go:39] Caches are synced for autoregister controller
	I0916 11:12:13.239062       1 cache.go:39] Caches are synced for LocalAvailability controller
	I0916 11:12:13.239168       1 cache.go:39] Caches are synced for APIServiceRegistrationController controller
	I0916 11:12:13.244891       1 handler_discovery.go:450] Starting ResourceDiscoveryManager
	E0916 11:12:13.254883       1 controller.go:97] Error removing old endpoints from kubernetes service: no API server IP addresses were listed in storage, refusing to erase all endpoints for the kubernetes Service
	I0916 11:12:13.943592       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:12:16.769293       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:12:16.868899       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	
	
	==> kube-controller-manager [07455bc60716ac512dd7e5994733e02cc35ce9f026df34c778b61b0551008067] <==
	I0916 11:10:34.141882       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="190.399µs"
	I0916 11:10:38.866338       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:10:38.866654       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:10:38.869927       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:10:38.884252       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:10:38.893710       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:10:38.927610       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="12.231812ms"
	I0916 11:10:38.928406       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="60.808µs"
	I0916 11:10:44.011777       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:10:53.732505       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:10:53.733023       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:10:53.744789       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:10:53.931755       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:10:54.086927       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:11:03.972440       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="49.846µs"
	I0916 11:11:05.041793       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="11.011584ms"
	I0916 11:11:05.042014       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="71.409µs"
	I0916 11:11:19.990147       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:11:19.990739       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:11:20.022943       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:11:23.948848       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:11:28.044133       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:11:28.063057       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	I0916 11:11:28.628744       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:11:28.629038       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m03"
	
	
	==> kube-controller-manager [d50f95689f753be20deac2d09fb8f9fed48faab083b55fcd5c3cde9cda958476] <==
	I0916 11:12:16.464219       1 shared_informer.go:320] Caches are synced for PVC protection
	I0916 11:12:16.465254       1 shared_informer.go:320] Caches are synced for certificate-csrsigning-kube-apiserver-client
	I0916 11:12:16.465422       1 shared_informer.go:320] Caches are synced for certificate-csrsigning-legacy-unknown
	I0916 11:12:16.467943       1 shared_informer.go:320] Caches are synced for crt configmap
	I0916 11:12:16.492150       1 shared_informer.go:320] Caches are synced for deployment
	I0916 11:12:16.503699       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 11:12:16.503961       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-890146-m02"
	I0916 11:12:16.512567       1 shared_informer.go:320] Caches are synced for bootstrap_signer
	I0916 11:12:16.545296       1 shared_informer.go:320] Caches are synced for endpoint_slice_mirroring
	I0916 11:12:16.595191       1 shared_informer.go:320] Caches are synced for cronjob
	I0916 11:12:16.663475       1 shared_informer.go:320] Caches are synced for HPA
	I0916 11:12:16.667972       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 11:12:16.685353       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 11:12:16.825286       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="369.265232ms"
	I0916 11:12:16.825904       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="575.431µs"
	I0916 11:12:17.103774       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 11:12:17.140658       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 11:12:17.140700       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 11:12:28.103935       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="multinode-890146-m02"
	I0916 11:12:32.686327       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="7.544353ms"
	I0916 11:12:32.687008       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="32.615µs"
	I0916 11:12:33.789512       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="22.555593ms"
	I0916 11:12:33.789941       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-7dff88458" duration="48.131µs"
	I0916 11:12:47.289595       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="10.208732ms"
	I0916 11:12:47.289901       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="75.224µs"
	
	
	==> kube-proxy [2094a8ccce98b1c9cc88aa41c125d07099a341dd911a10885147d915e9f48a3d] <==
	I0916 11:12:15.049910       1 server_linux.go:66] "Using iptables proxy"
	I0916 11:12:15.201397       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.58.2"]
	E0916 11:12:15.209149       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 11:12:15.294942       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 11:12:15.295074       1 server_linux.go:169] "Using iptables Proxier"
	I0916 11:12:15.308950       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 11:12:15.311941       1 server.go:483] "Version info" version="v1.31.1"
	I0916 11:12:15.312111       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:12:15.321784       1 config.go:199] "Starting service config controller"
	I0916 11:12:15.321944       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 11:12:15.322083       1 config.go:105] "Starting endpoint slice config controller"
	I0916 11:12:15.322184       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 11:12:15.323606       1 config.go:328] "Starting node config controller"
	I0916 11:12:15.323781       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 11:12:15.422492       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 11:12:15.422738       1 shared_informer.go:320] Caches are synced for service config
	I0916 11:12:15.424061       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-proxy [654ecbfed03d8e141ce6b52bf8bf1bd98bc2fcb02e1f9df6947b6c89d77bde4b] <==
	I0916 11:09:56.888587       1 server_linux.go:66] "Using iptables proxy"
	I0916 11:09:57.098848       1 server.go:677] "Successfully retrieved node IP(s)" IPs=["192.168.58.2"]
	E0916 11:09:57.099108       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0916 11:09:57.134068       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0916 11:09:57.134295       1 server_linux.go:169] "Using iptables Proxier"
	I0916 11:09:57.138428       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0916 11:09:57.139452       1 server.go:483] "Version info" version="v1.31.1"
	I0916 11:09:57.139482       1 server.go:485] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:09:57.144752       1 config.go:328] "Starting node config controller"
	I0916 11:09:57.144816       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0916 11:09:57.145039       1 config.go:199] "Starting service config controller"
	I0916 11:09:57.145095       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0916 11:09:57.145196       1 config.go:105] "Starting endpoint slice config controller"
	I0916 11:09:57.145235       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0916 11:09:57.245716       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0916 11:09:57.245735       1 shared_informer.go:320] Caches are synced for service config
	I0916 11:09:57.246330       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [2cc43e414446d6a831068e3af8cf0e1b1501b91167342420a33bdc74bc31c020] <==
	W0916 11:09:55.628657       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0916 11:09:55.628676       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.628756       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0916 11:09:55.628784       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.628837       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0916 11:09:55.628872       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.629209       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0916 11:09:55.629372       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.629579       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0916 11:09:55.629714       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.629937       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0916 11:09:55.637784       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.637395       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0916 11:09:55.637851       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.637587       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0916 11:09:55.637872       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.637674       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0916 11:09:55.637973       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.637729       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0916 11:09:55.638060       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.638206       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0916 11:09:55.638278       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0916 11:09:55.642795       1 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0916 11:09:55.642891       1 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	I0916 11:09:55.696400       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kube-scheduler [816ffe4e3d0f35ff5d46ddef1c41d4f5ec4bbb4a5ae7d4ccfffd76d4b99eeecb] <==
	I0916 11:12:10.779584       1 serving.go:386] Generated self-signed cert in-memory
	W0916 11:12:13.099538       1 requestheader_controller.go:196] Unable to get configmap/extension-apiserver-authentication in kube-system.  Usually fixed by 'kubectl create rolebinding -n kube-system ROLEBINDING_NAME --role=extension-apiserver-authentication-reader --serviceaccount=YOUR_NS:YOUR_SA'
	W0916 11:12:13.099647       1 authentication.go:370] Error looking up in-cluster authentication configuration: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot get resource "configmaps" in API group "" in the namespace "kube-system"
	W0916 11:12:13.099678       1 authentication.go:371] Continuing without authentication configuration. This may treat all requests as anonymous.
	W0916 11:12:13.099733       1 authentication.go:372] To require authentication configuration lookup to succeed, set --authentication-tolerate-lookup-failure=false
	I0916 11:12:13.160121       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 11:12:13.160152       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:12:13.163082       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 11:12:13.163345       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 11:12:13.163377       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 11:12:13.163394       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 11:12:13.264115       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 11:12:13 multinode-890146 kubelet[660]: I0916 11:12:13.231865     660 kubelet_node_status.go:111] "Node was previously registered" node="multinode-890146"
	Sep 16 11:12:13 multinode-890146 kubelet[660]: I0916 11:12:13.232549     660 kubelet_node_status.go:75] "Successfully registered node" node="multinode-890146"
	Sep 16 11:12:13 multinode-890146 kubelet[660]: I0916 11:12:13.232691     660 kuberuntime_manager.go:1635] "Updating runtime config through cri with podcidr" CIDR="10.244.0.0/24"
	Sep 16 11:12:13 multinode-890146 kubelet[660]: I0916 11:12:13.234009     660 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Sep 16 11:12:14 multinode-890146 kubelet[660]: I0916 11:12:14.008987     660 apiserver.go:52] "Watching apiserver"
	Sep 16 11:12:14 multinode-890146 kubelet[660]: I0916 11:12:14.033876     660 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
	Sep 16 11:12:14 multinode-890146 kubelet[660]: I0916 11:12:14.119962     660 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-cfg\" (UniqueName: \"kubernetes.io/host-path/24ef6be7-a1ab-41f7-83c8-aa5af5007281-cni-cfg\") pod \"kindnet-dbrhk\" (UID: \"24ef6be7-a1ab-41f7-83c8-aa5af5007281\") " pod="kube-system/kindnet-dbrhk"
	Sep 16 11:12:14 multinode-890146 kubelet[660]: I0916 11:12:14.120050     660 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/97795413-5c7a-480b-9cbd-18d4dea5669b-tmp\") pod \"storage-provisioner\" (UID: \"97795413-5c7a-480b-9cbd-18d4dea5669b\") " pod="kube-system/storage-provisioner"
	Sep 16 11:12:14 multinode-890146 kubelet[660]: I0916 11:12:14.120087     660 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/24ef6be7-a1ab-41f7-83c8-aa5af5007281-lib-modules\") pod \"kindnet-dbrhk\" (UID: \"24ef6be7-a1ab-41f7-83c8-aa5af5007281\") " pod="kube-system/kindnet-dbrhk"
	Sep 16 11:12:14 multinode-890146 kubelet[660]: I0916 11:12:14.120105     660 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73-lib-modules\") pod \"kube-proxy-fm5qr\" (UID: \"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73\") " pod="kube-system/kube-proxy-fm5qr"
	Sep 16 11:12:14 multinode-890146 kubelet[660]: I0916 11:12:14.120156     660 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73-xtables-lock\") pod \"kube-proxy-fm5qr\" (UID: \"8e24c9ec-b2ec-461d-97d3-4bb6e51d6a73\") " pod="kube-system/kube-proxy-fm5qr"
	Sep 16 11:12:14 multinode-890146 kubelet[660]: I0916 11:12:14.120179     660 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/24ef6be7-a1ab-41f7-83c8-aa5af5007281-xtables-lock\") pod \"kindnet-dbrhk\" (UID: \"24ef6be7-a1ab-41f7-83c8-aa5af5007281\") " pod="kube-system/kindnet-dbrhk"
	Sep 16 11:12:14 multinode-890146 kubelet[660]: I0916 11:12:14.134365     660 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 11:12:17 multinode-890146 kubelet[660]: I0916 11:12:17.260259     660 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
	Sep 16 11:12:18 multinode-890146 kubelet[660]: E0916 11:12:18.164748     660 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 11:12:18 multinode-890146 kubelet[660]: E0916 11:12:18.164803     660 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 11:12:28 multinode-890146 kubelet[660]: E0916 11:12:28.197219     660 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 11:12:28 multinode-890146 kubelet[660]: E0916 11:12:28.197272     660 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 11:12:38 multinode-890146 kubelet[660]: E0916 11:12:38.217785     660 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 11:12:38 multinode-890146 kubelet[660]: E0916 11:12:38.217840     660 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	Sep 16 11:12:45 multinode-890146 kubelet[660]: I0916 11:12:45.301875     660 scope.go:117] "RemoveContainer" containerID="07054c18240bddcff9366f96beae270b49083c8c0a3d760859239bbf844ea0cd"
	Sep 16 11:12:45 multinode-890146 kubelet[660]: I0916 11:12:45.302420     660 scope.go:117] "RemoveContainer" containerID="876c8caf439c71eadceea6d2ac2108269374ff30dc83ed807fdbf24a0fac9129"
	Sep 16 11:12:45 multinode-890146 kubelet[660]: E0916 11:12:45.302583     660 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"storage-provisioner\" with CrashLoopBackOff: \"back-off 10s restarting failed container=storage-provisioner pod=storage-provisioner_kube-system(97795413-5c7a-480b-9cbd-18d4dea5669b)\"" pod="kube-system/storage-provisioner" podUID="97795413-5c7a-480b-9cbd-18d4dea5669b"
	Sep 16 11:12:48 multinode-890146 kubelet[660]: E0916 11:12:48.235514     660 summary_sys_containers.go:51] "Failed to get system container stats" err="failed to get cgroup stats for \"/kubepods\": failed to get container info for \"/kubepods\": unknown container \"/kubepods\"" containerName="/kubepods"
	Sep 16 11:12:48 multinode-890146 kubelet[660]: E0916 11:12:48.235566     660 helpers.go:854] "Eviction manager: failed to construct signal" err="system container \"pods\" not found in metrics" signal="allocatableMemory.available"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p multinode-890146 -n multinode-890146
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-890146 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context multinode-890146 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (480.415µs)
helpers_test.go:263: kubectl --context multinode-890146 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
--- FAIL: TestMultiNode/serial/RestartMultiNode (51.60s)

                                                
                                    
x
+
TestPreload (18.42s)

                                                
                                                
=== RUN   TestPreload
preload_test.go:44: (dbg) Run:  out/minikube-linux-arm64 start -p test-preload-803604 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.24.4
E0916 11:13:33.956681 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
preload_test.go:44: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p test-preload-803604 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.24.4: exit status 100 (16.12647577s)

                                                
                                                
-- stdout --
	* [test-preload-803604] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=19651
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	* Using the docker driver based on user configuration
	* Using Docker driver with root privileges
	* Starting "test-preload-803604" primary control-plane node in "test-preload-803604" cluster
	* Pulling base image v0.0.45-1726358845-19644 ...
	* Creating docker container (CPUs=2, Memory=2200MB) ...
	* Preparing Kubernetes v1.24.4 on containerd 1.7.22 ...
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0916 11:13:30.589492 2203017 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:13:30.589626 2203017 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:13:30.589637 2203017 out.go:358] Setting ErrFile to fd 2...
	I0916 11:13:30.589643 2203017 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:13:30.589885 2203017 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 11:13:30.590350 2203017 out.go:352] Setting JSON to false
	I0916 11:13:30.591363 2203017 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":140153,"bootTime":1726345058,"procs":188,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 11:13:30.591441 2203017 start.go:139] virtualization:  
	I0916 11:13:30.594526 2203017 out.go:177] * [test-preload-803604] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:13:30.597097 2203017 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:13:30.597168 2203017 notify.go:220] Checking for updates...
	I0916 11:13:30.601131 2203017 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:13:30.603390 2203017 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:13:30.605127 2203017 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 11:13:30.606850 2203017 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:13:30.608397 2203017 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 11:13:30.610543 2203017 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:13:30.640162 2203017 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:13:30.640319 2203017 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:13:30.696898 2203017 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:47 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 11:13:30.686583545 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:13:30.697029 2203017 docker.go:318] overlay module found
	I0916 11:13:30.699149 2203017 out.go:177] * Using the docker driver based on user configuration
	I0916 11:13:30.700694 2203017 start.go:297] selected driver: docker
	I0916 11:13:30.700715 2203017 start.go:901] validating driver "docker" against <nil>
	I0916 11:13:30.700736 2203017 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:13:30.701477 2203017 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:13:30.757928 2203017 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:47 OomKillDisable:true NGoroutines:61 SystemTime:2024-09-16 11:13:30.74844836 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarc
h64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerError
s:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:13:30.758145 2203017 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 11:13:30.758370 2203017 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0916 11:13:30.760393 2203017 out.go:177] * Using Docker driver with root privileges
	I0916 11:13:30.761972 2203017 cni.go:84] Creating CNI manager for ""
	I0916 11:13:30.762052 2203017 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 11:13:30.762070 2203017 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 11:13:30.762158 2203017 start.go:340] cluster config:
	{Name:test-preload-803604 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.24.4 ClusterName:test-preload-803604 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local Cont
ainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.24.4 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: St
aticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 11:13:30.764349 2203017 out.go:177] * Starting "test-preload-803604" primary control-plane node in "test-preload-803604" cluster
	I0916 11:13:30.766728 2203017 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 11:13:30.769139 2203017 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:13:30.771231 2203017 preload.go:131] Checking if preload exists for k8s version v1.24.4 and runtime containerd
	I0916 11:13:30.771263 2203017 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 11:13:30.771674 2203017 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/test-preload-803604/config.json ...
	I0916 11:13:30.771705 2203017 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/test-preload-803604/config.json: {Name:mka76378be9158bc61b22f88b4d54349772e2b92 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:13:30.771950 2203017 cache.go:107] acquiring lock: {Name:mk2c65b7994daff0f6d1df7c663041c308acf7ca Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:13:30.772000 2203017 cache.go:107] acquiring lock: {Name:mk38a4e04be7f643107b863303dd2b9ffd973939 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:13:30.772092 2203017 image.go:135] retrieving image: gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:13:30.772145 2203017 image.go:135] retrieving image: registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:13:30.772389 2203017 cache.go:107] acquiring lock: {Name:mk66c7496a1c08725f58b9c7d9dfbc1304298bc6 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:13:30.772335 2203017 cache.go:107] acquiring lock: {Name:mk4d77ac39acbaec82535fae4b2576f3d0844c8c Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:13:30.772491 2203017 cache.go:107] acquiring lock: {Name:mk63806acde844d92743dc32a75cc2cbefc182f8 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:13:30.772564 2203017 image.go:135] retrieving image: registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:13:30.772639 2203017 image.go:135] retrieving image: registry.k8s.io/etcd:3.5.3-0
	I0916 11:13:30.772754 2203017 cache.go:107] acquiring lock: {Name:mk68ceb7f14e93e773452875bf1873e0b986242e Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:13:30.772893 2203017 image.go:135] retrieving image: registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:13:30.773229 2203017 image.go:135] retrieving image: registry.k8s.io/pause:3.7
	I0916 11:13:30.774028 2203017 image.go:178] daemon lookup for registry.k8s.io/kube-proxy:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:13:30.773114 2203017 cache.go:107] acquiring lock: {Name:mk16854964621c9b3ddb02b237bc3bac4b090e0c Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:13:30.774519 2203017 image.go:178] daemon lookup for registry.k8s.io/pause:3.7: Error response from daemon: No such image: registry.k8s.io/pause:3.7
	I0916 11:13:30.774668 2203017 image.go:178] daemon lookup for registry.k8s.io/kube-apiserver:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:13:30.774753 2203017 image.go:135] retrieving image: registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:13:30.774926 2203017 image.go:178] daemon lookup for registry.k8s.io/kube-scheduler:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:13:30.772718 2203017 cache.go:107] acquiring lock: {Name:mk4f91c8f3b08c889d1086e312a62d99c6c7acef Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:13:30.775269 2203017 image.go:135] retrieving image: registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:13:30.776264 2203017 image.go:178] daemon lookup for registry.k8s.io/coredns/coredns:v1.8.6: Error response from daemon: No such image: registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:13:30.776856 2203017 image.go:178] daemon lookup for gcr.io/k8s-minikube/storage-provisioner:v5: Error response from daemon: No such image: gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:13:30.788661 2203017 image.go:178] daemon lookup for registry.k8s.io/etcd:3.5.3-0: Error response from daemon: No such image: registry.k8s.io/etcd:3.5.3-0
	I0916 11:13:30.788794 2203017 image.go:178] daemon lookup for registry.k8s.io/kube-controller-manager:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-controller-manager:v1.24.4
	W0916 11:13:30.828822 2203017 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:13:30.828845 2203017 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:13:30.828929 2203017 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:13:30.828952 2203017 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:13:30.828962 2203017 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:13:30.828970 2203017 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:13:30.828976 2203017 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:13:30.945539 2203017 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:13:30.945615 2203017 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:13:30.945662 2203017 start.go:360] acquireMachinesLock for test-preload-803604: {Name:mk24300cf70137a422559b7b71940ed11df87767 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:13:30.945805 2203017 start.go:364] duration metric: took 104.59µs to acquireMachinesLock for "test-preload-803604"
	I0916 11:13:30.945839 2203017 start.go:93] Provisioning new machine with config: &{Name:test-preload-803604 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.24.4 ClusterName:test-preload-803604 Namespace:default APIServerHAVIP: APISe
rverName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.24.4 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false Disab
leMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.24.4 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 11:13:30.945937 2203017 start.go:125] createHost starting for "" (driver="docker")
	I0916 11:13:30.950255 2203017 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0916 11:13:30.950909 2203017 start.go:159] libmachine.API.Create for "test-preload-803604" (driver="docker")
	I0916 11:13:30.950941 2203017 client.go:168] LocalClient.Create starting
	I0916 11:13:30.951007 2203017 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 11:13:30.951063 2203017 main.go:141] libmachine: Decoding PEM data...
	I0916 11:13:30.951086 2203017 main.go:141] libmachine: Parsing certificate...
	I0916 11:13:30.951129 2203017 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 11:13:30.951153 2203017 main.go:141] libmachine: Decoding PEM data...
	I0916 11:13:30.951167 2203017 main.go:141] libmachine: Parsing certificate...
	I0916 11:13:30.951566 2203017 cli_runner.go:164] Run: docker network inspect test-preload-803604 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 11:13:30.967944 2203017 cli_runner.go:211] docker network inspect test-preload-803604 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 11:13:30.968045 2203017 network_create.go:284] running [docker network inspect test-preload-803604] to gather additional debugging logs...
	I0916 11:13:30.968072 2203017 cli_runner.go:164] Run: docker network inspect test-preload-803604
	W0916 11:13:30.983200 2203017 cli_runner.go:211] docker network inspect test-preload-803604 returned with exit code 1
	I0916 11:13:30.983241 2203017 network_create.go:287] error running [docker network inspect test-preload-803604]: docker network inspect test-preload-803604: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network test-preload-803604 not found
	I0916 11:13:30.983259 2203017 network_create.go:289] output of [docker network inspect test-preload-803604]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network test-preload-803604 not found
	
	** /stderr **
	I0916 11:13:30.983370 2203017 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:13:31.000234 2203017 network.go:211] skipping subnet 192.168.49.0/24 that is taken: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName:br-941929ec13d1 IfaceIPv4:192.168.49.1 IfaceMTU:1500 IfaceMAC:02:42:32:84:fe:19} reservation:<nil>}
	I0916 11:13:31.000678 2203017 network.go:211] skipping subnet 192.168.58.0/24 that is taken: &{IP:192.168.58.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.58.0/24 Gateway:192.168.58.1 ClientMin:192.168.58.2 ClientMax:192.168.58.254 Broadcast:192.168.58.255 IsPrivate:true Interface:{IfaceName:br-b138f637362d IfaceIPv4:192.168.58.1 IfaceMTU:1500 IfaceMAC:02:42:81:42:5c:08} reservation:<nil>}
	I0916 11:13:31.001280 2203017 network.go:211] skipping subnet 192.168.67.0/24 that is taken: &{IP:192.168.67.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.67.0/24 Gateway:192.168.67.1 ClientMin:192.168.67.2 ClientMax:192.168.67.254 Broadcast:192.168.67.255 IsPrivate:true Interface:{IfaceName:br-c7e139d3d7f3 IfaceIPv4:192.168.67.1 IfaceMTU:1500 IfaceMAC:02:42:92:83:9d:9a} reservation:<nil>}
	I0916 11:13:31.002245 2203017 network.go:206] using free private subnet 192.168.76.0/24: &{IP:192.168.76.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.76.0/24 Gateway:192.168.76.1 ClientMin:192.168.76.2 ClientMax:192.168.76.254 Broadcast:192.168.76.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x400049d0a0}
	I0916 11:13:31.002290 2203017 network_create.go:124] attempt to create docker network test-preload-803604 192.168.76.0/24 with gateway 192.168.76.1 and MTU of 1500 ...
	I0916 11:13:31.002378 2203017 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.76.0/24 --gateway=192.168.76.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=test-preload-803604 test-preload-803604
	I0916 11:13:31.076108 2203017 network_create.go:108] docker network test-preload-803604 192.168.76.0/24 created
	I0916 11:13:31.076142 2203017 kic.go:121] calculated static IP "192.168.76.2" for the "test-preload-803604" container
	I0916 11:13:31.076220 2203017 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:13:31.094061 2203017 cli_runner.go:164] Run: docker volume create test-preload-803604 --label name.minikube.sigs.k8s.io=test-preload-803604 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:13:31.113190 2203017 oci.go:103] Successfully created a docker volume test-preload-803604
	I0916 11:13:31.113279 2203017 cli_runner.go:164] Run: docker run --rm --name test-preload-803604-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=test-preload-803604 --entrypoint /usr/bin/test -v test-preload-803604:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	W0916 11:13:31.227042 2203017 image.go:283] image registry.k8s.io/coredns/coredns:v1.8.6 arch mismatch: want arm64 got amd64. fixing
	I0916 11:13:31.227101 2203017 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6
	I0916 11:13:31.336530 2203017 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4
	I0916 11:13:31.337913 2203017 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4
	I0916 11:13:31.351724 2203017 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4
	I0916 11:13:31.363201 2203017 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0
	I0916 11:13:31.373055 2203017 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7
	I0916 11:13:31.375537 2203017 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4
	I0916 11:13:31.473023 2203017 cache.go:157] /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7 exists
	I0916 11:13:31.473058 2203017 cache.go:96] cache image "registry.k8s.io/pause:3.7" -> "/home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7" took 700.670682ms
	I0916 11:13:31.473072 2203017 cache.go:80] save to tar file registry.k8s.io/pause:3.7 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7 succeeded
	I0916 11:13:31.567423 2203017 cache.go:157] /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6 exists
	I0916 11:13:31.567498 2203017 cache.go:96] cache image "registry.k8s.io/coredns/coredns:v1.8.6" -> "/home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6" took 794.388646ms
	I0916 11:13:31.567527 2203017 cache.go:80] save to tar file registry.k8s.io/coredns/coredns:v1.8.6 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6 succeeded
	I0916 11:13:31.766881 2203017 cache.go:157] /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4 exists
	I0916 11:13:31.766962 2203017 cache.go:96] cache image "registry.k8s.io/kube-scheduler:v1.24.4" -> "/home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4" took 994.200203ms
	I0916 11:13:31.767227 2203017 cache.go:80] save to tar file registry.k8s.io/kube-scheduler:v1.24.4 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4 succeeded
	W0916 11:13:31.877912 2203017 image.go:283] image gcr.io/k8s-minikube/storage-provisioner:v5 arch mismatch: want arm64 got amd64. fixing
	I0916 11:13:31.878025 2203017 cache.go:162] opening:  /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5
	I0916 11:13:31.903144 2203017 cache.go:157] /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4 exists
	I0916 11:13:31.903227 2203017 cache.go:96] cache image "registry.k8s.io/kube-controller-manager:v1.24.4" -> "/home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4" took 1.13051054s
	I0916 11:13:31.903264 2203017 cache.go:80] save to tar file registry.k8s.io/kube-controller-manager:v1.24.4 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4 succeeded
	I0916 11:13:31.940201 2203017 oci.go:107] Successfully prepared a docker volume test-preload-803604
	I0916 11:13:31.940232 2203017 preload.go:131] Checking if preload exists for k8s version v1.24.4 and runtime containerd
	W0916 11:13:31.940368 2203017 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0916 11:13:31.940489 2203017 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0916 11:13:31.964855 2203017 cache.go:157] /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4 exists
	I0916 11:13:31.964938 2203017 cache.go:96] cache image "registry.k8s.io/kube-proxy:v1.24.4" -> "/home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4" took 1.192948726s
	I0916 11:13:31.964975 2203017 cache.go:80] save to tar file registry.k8s.io/kube-proxy:v1.24.4 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4 succeeded
	I0916 11:13:31.983181 2203017 cache.go:157] /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4 exists
	I0916 11:13:31.983269 2203017 cache.go:96] cache image "registry.k8s.io/kube-apiserver:v1.24.4" -> "/home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4" took 1.210936138s
	I0916 11:13:31.983296 2203017 cache.go:80] save to tar file registry.k8s.io/kube-apiserver:v1.24.4 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4 succeeded
	I0916 11:13:32.043957 2203017 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname test-preload-803604 --name test-preload-803604 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=test-preload-803604 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=test-preload-803604 --network test-preload-803604 --ip 192.168.76.2 --volume test-preload-803604:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0
	I0916 11:13:32.331319 2203017 cache.go:157] /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5 exists
	I0916 11:13:32.331351 2203017 cache.go:96] cache image "gcr.io/k8s-minikube/storage-provisioner:v5" -> "/home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5" took 1.559403391s
	I0916 11:13:32.331365 2203017 cache.go:80] save to tar file gcr.io/k8s-minikube/storage-provisioner:v5 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5 succeeded
	I0916 11:13:32.492143 2203017 cache.go:157] /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0 exists
	I0916 11:13:32.492217 2203017 cache.go:96] cache image "registry.k8s.io/etcd:3.5.3-0" -> "/home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0" took 1.719728987s
	I0916 11:13:32.492243 2203017 cache.go:80] save to tar file registry.k8s.io/etcd:3.5.3-0 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0 succeeded
	I0916 11:13:32.492279 2203017 cache.go:87] Successfully saved all images to host disk.
	I0916 11:13:32.558090 2203017 cli_runner.go:164] Run: docker container inspect test-preload-803604 --format={{.State.Running}}
	I0916 11:13:32.580093 2203017 cli_runner.go:164] Run: docker container inspect test-preload-803604 --format={{.State.Status}}
	I0916 11:13:32.603098 2203017 cli_runner.go:164] Run: docker exec test-preload-803604 stat /var/lib/dpkg/alternatives/iptables
	I0916 11:13:32.672928 2203017 oci.go:144] the created container "test-preload-803604" has a running status.
	I0916 11:13:32.672958 2203017 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/test-preload-803604/id_rsa...
	I0916 11:13:33.321939 2203017 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/test-preload-803604/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0916 11:13:33.355144 2203017 cli_runner.go:164] Run: docker container inspect test-preload-803604 --format={{.State.Status}}
	I0916 11:13:33.394803 2203017 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0916 11:13:33.394823 2203017 kic_runner.go:114] Args: [docker exec --privileged test-preload-803604 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0916 11:13:33.449835 2203017 cli_runner.go:164] Run: docker container inspect test-preload-803604 --format={{.State.Status}}
	I0916 11:13:33.473383 2203017 machine.go:93] provisionDockerMachine start ...
	I0916 11:13:33.473480 2203017 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-803604
	I0916 11:13:33.495976 2203017 main.go:141] libmachine: Using SSH client type: native
	I0916 11:13:33.496233 2203017 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40767 <nil> <nil>}
	I0916 11:13:33.496243 2203017 main.go:141] libmachine: About to run SSH command:
	hostname
	I0916 11:13:33.658476 2203017 main.go:141] libmachine: SSH cmd err, output: <nil>: test-preload-803604
	
	I0916 11:13:33.658562 2203017 ubuntu.go:169] provisioning hostname "test-preload-803604"
	I0916 11:13:33.658647 2203017 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-803604
	I0916 11:13:33.679345 2203017 main.go:141] libmachine: Using SSH client type: native
	I0916 11:13:33.679663 2203017 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40767 <nil> <nil>}
	I0916 11:13:33.679678 2203017 main.go:141] libmachine: About to run SSH command:
	sudo hostname test-preload-803604 && echo "test-preload-803604" | sudo tee /etc/hostname
	I0916 11:13:33.831363 2203017 main.go:141] libmachine: SSH cmd err, output: <nil>: test-preload-803604
	
	I0916 11:13:33.831458 2203017 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-803604
	I0916 11:13:33.851868 2203017 main.go:141] libmachine: Using SSH client type: native
	I0916 11:13:33.852128 2203017 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x41abe0] 0x41d420 <nil>  [] 0s} 127.0.0.1 40767 <nil> <nil>}
	I0916 11:13:33.852152 2203017 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\stest-preload-803604' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 test-preload-803604/g' /etc/hosts;
				else 
					echo '127.0.1.1 test-preload-803604' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0916 11:13:33.992343 2203017 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0916 11:13:33.992445 2203017 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/19651-2057935/.minikube CaCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/19651-2057935/.minikube}
	I0916 11:13:33.992507 2203017 ubuntu.go:177] setting up certificates
	I0916 11:13:33.992552 2203017 provision.go:84] configureAuth start
	I0916 11:13:33.992675 2203017 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" test-preload-803604
	I0916 11:13:34.022633 2203017 provision.go:143] copyHostCerts
	I0916 11:13:34.022841 2203017 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem, removing ...
	I0916 11:13:34.022856 2203017 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem
	I0916 11:13:34.022939 2203017 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/ca.pem (1082 bytes)
	I0916 11:13:34.023050 2203017 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem, removing ...
	I0916 11:13:34.023056 2203017 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem
	I0916 11:13:34.023096 2203017 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/cert.pem (1123 bytes)
	I0916 11:13:34.023168 2203017 exec_runner.go:144] found /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem, removing ...
	I0916 11:13:34.023174 2203017 exec_runner.go:203] rm: /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem
	I0916 11:13:34.023203 2203017 exec_runner.go:151] cp: /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/19651-2057935/.minikube/key.pem (1675 bytes)
	I0916 11:13:34.023259 2203017 provision.go:117] generating server cert: /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca-key.pem org=jenkins.test-preload-803604 san=[127.0.0.1 192.168.76.2 localhost minikube test-preload-803604]
	I0916 11:13:34.234370 2203017 provision.go:177] copyRemoteCerts
	I0916 11:13:34.234456 2203017 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0916 11:13:34.234508 2203017 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-803604
	I0916 11:13:34.255671 2203017 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40767 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/test-preload-803604/id_rsa Username:docker}
	I0916 11:13:34.356170 2203017 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1082 bytes)
	I0916 11:13:34.381016 2203017 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server.pem --> /etc/docker/server.pem (1224 bytes)
	I0916 11:13:34.405661 2203017 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0916 11:13:34.430974 2203017 provision.go:87] duration metric: took 438.394427ms to configureAuth
	I0916 11:13:34.431000 2203017 ubuntu.go:193] setting minikube options for container-runtime
	I0916 11:13:34.431233 2203017 config.go:182] Loaded profile config "test-preload-803604": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.24.4
	I0916 11:13:34.431246 2203017 machine.go:96] duration metric: took 957.845727ms to provisionDockerMachine
	I0916 11:13:34.431254 2203017 client.go:171] duration metric: took 3.480306461s to LocalClient.Create
	I0916 11:13:34.431274 2203017 start.go:167] duration metric: took 3.480368475s to libmachine.API.Create "test-preload-803604"
	I0916 11:13:34.431288 2203017 start.go:293] postStartSetup for "test-preload-803604" (driver="docker")
	I0916 11:13:34.431298 2203017 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0916 11:13:34.431357 2203017 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0916 11:13:34.431400 2203017 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-803604
	I0916 11:13:34.448085 2203017 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40767 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/test-preload-803604/id_rsa Username:docker}
	I0916 11:13:34.548040 2203017 ssh_runner.go:195] Run: cat /etc/os-release
	I0916 11:13:34.551107 2203017 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0916 11:13:34.551143 2203017 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0916 11:13:34.551154 2203017 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0916 11:13:34.551161 2203017 info.go:137] Remote host: Ubuntu 22.04.4 LTS
	I0916 11:13:34.551172 2203017 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/addons for local assets ...
	I0916 11:13:34.551243 2203017 filesync.go:126] Scanning /home/jenkins/minikube-integration/19651-2057935/.minikube/files for local assets ...
	I0916 11:13:34.551327 2203017 filesync.go:149] local asset: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem -> 20633262.pem in /etc/ssl/certs
	I0916 11:13:34.551438 2203017 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0916 11:13:34.559848 2203017 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/ssl/certs/20633262.pem --> /etc/ssl/certs/20633262.pem (1708 bytes)
	I0916 11:13:34.584217 2203017 start.go:296] duration metric: took 152.914552ms for postStartSetup
	I0916 11:13:34.584613 2203017 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" test-preload-803604
	I0916 11:13:34.601003 2203017 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/test-preload-803604/config.json ...
	I0916 11:13:34.601304 2203017 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:13:34.601362 2203017 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-803604
	I0916 11:13:34.618683 2203017 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40767 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/test-preload-803604/id_rsa Username:docker}
	I0916 11:13:34.711599 2203017 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0916 11:13:34.716259 2203017 start.go:128] duration metric: took 3.770305917s to createHost
	I0916 11:13:34.716282 2203017 start.go:83] releasing machines lock for "test-preload-803604", held for 3.77046224s
	I0916 11:13:34.716352 2203017 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" test-preload-803604
	I0916 11:13:34.733669 2203017 ssh_runner.go:195] Run: cat /version.json
	I0916 11:13:34.733683 2203017 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0916 11:13:34.733719 2203017 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-803604
	I0916 11:13:34.733755 2203017 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-803604
	I0916 11:13:34.753467 2203017 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40767 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/test-preload-803604/id_rsa Username:docker}
	I0916 11:13:34.761831 2203017 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40767 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/test-preload-803604/id_rsa Username:docker}
	I0916 11:13:34.972224 2203017 ssh_runner.go:195] Run: systemctl --version
	I0916 11:13:34.976781 2203017 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0916 11:13:34.981017 2203017 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0916 11:13:35.011170 2203017 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0916 11:13:35.011277 2203017 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0916 11:13:35.044458 2203017 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0916 11:13:35.044482 2203017 start.go:495] detecting cgroup driver to use...
	I0916 11:13:35.044518 2203017 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0916 11:13:35.044588 2203017 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0916 11:13:35.057846 2203017 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0916 11:13:35.070473 2203017 docker.go:217] disabling cri-docker service (if available) ...
	I0916 11:13:35.070598 2203017 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0916 11:13:35.086322 2203017 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0916 11:13:35.102869 2203017 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0916 11:13:35.188499 2203017 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0916 11:13:35.288701 2203017 docker.go:233] disabling docker service ...
	I0916 11:13:35.288773 2203017 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0916 11:13:35.310908 2203017 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0916 11:13:35.323591 2203017 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0916 11:13:35.403733 2203017 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0916 11:13:35.504443 2203017 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0916 11:13:35.516400 2203017 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0916 11:13:35.533295 2203017 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.7"|' /etc/containerd/config.toml"
	I0916 11:13:35.543157 2203017 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0916 11:13:35.553130 2203017 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0916 11:13:35.553201 2203017 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0916 11:13:35.563092 2203017 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:13:35.573052 2203017 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0916 11:13:35.582770 2203017 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0916 11:13:35.593389 2203017 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0916 11:13:35.603034 2203017 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0916 11:13:35.613068 2203017 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0916 11:13:35.622801 2203017 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0916 11:13:35.632726 2203017 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0916 11:13:35.641267 2203017 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0916 11:13:35.649578 2203017 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:13:35.738634 2203017 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0916 11:13:35.825620 2203017 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0916 11:13:35.825687 2203017 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0916 11:13:35.829210 2203017 start.go:563] Will wait 60s for crictl version
	I0916 11:13:35.829322 2203017 ssh_runner.go:195] Run: which crictl
	I0916 11:13:35.832796 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0916 11:13:35.876407 2203017 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.22
	RuntimeApiVersion:  v1
	I0916 11:13:35.876480 2203017 ssh_runner.go:195] Run: containerd --version
	I0916 11:13:35.898541 2203017 ssh_runner.go:195] Run: containerd --version
	I0916 11:13:35.922833 2203017 out.go:177] * Preparing Kubernetes v1.24.4 on containerd 1.7.22 ...
	I0916 11:13:35.924754 2203017 cli_runner.go:164] Run: docker network inspect test-preload-803604 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:13:35.940783 2203017 ssh_runner.go:195] Run: grep 192.168.76.1	host.minikube.internal$ /etc/hosts
	I0916 11:13:35.944507 2203017 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.76.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0916 11:13:35.955395 2203017 kubeadm.go:883] updating cluster {Name:test-preload-803604 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.24.4 ClusterName:test-preload-803604 Namespace:default APIServerHAVIP: APIServerName:minikubeCA
APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.24.4 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetri
cs:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0916 11:13:35.955508 2203017 preload.go:131] Checking if preload exists for k8s version v1.24.4 and runtime containerd
	I0916 11:13:35.955556 2203017 ssh_runner.go:195] Run: sudo crictl images --output json
	I0916 11:13:36.007326 2203017 containerd.go:623] couldn't find preloaded image for "registry.k8s.io/kube-apiserver:v1.24.4". assuming images are not preloaded.
	I0916 11:13:36.007365 2203017 cache_images.go:88] LoadCachedImages start: [registry.k8s.io/kube-apiserver:v1.24.4 registry.k8s.io/kube-controller-manager:v1.24.4 registry.k8s.io/kube-scheduler:v1.24.4 registry.k8s.io/kube-proxy:v1.24.4 registry.k8s.io/pause:3.7 registry.k8s.io/etcd:3.5.3-0 registry.k8s.io/coredns/coredns:v1.8.6 gcr.io/k8s-minikube/storage-provisioner:v5]
	I0916 11:13:36.007447 2203017 image.go:135] retrieving image: gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:13:36.007724 2203017 image.go:135] retrieving image: registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:13:36.007844 2203017 image.go:135] retrieving image: registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:13:36.007977 2203017 image.go:135] retrieving image: registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:13:36.008100 2203017 image.go:135] retrieving image: registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:13:36.008190 2203017 image.go:135] retrieving image: registry.k8s.io/pause:3.7
	I0916 11:13:36.008275 2203017 image.go:135] retrieving image: registry.k8s.io/etcd:3.5.3-0
	I0916 11:13:36.008359 2203017 image.go:135] retrieving image: registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:13:36.013186 2203017 image.go:178] daemon lookup for registry.k8s.io/etcd:3.5.3-0: Error response from daemon: No such image: registry.k8s.io/etcd:3.5.3-0
	I0916 11:13:36.013506 2203017 image.go:178] daemon lookup for registry.k8s.io/kube-proxy:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:13:36.013678 2203017 image.go:178] daemon lookup for registry.k8s.io/kube-scheduler:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:13:36.013840 2203017 image.go:178] daemon lookup for registry.k8s.io/kube-controller-manager:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:13:36.013978 2203017 image.go:178] daemon lookup for registry.k8s.io/pause:3.7: Error response from daemon: No such image: registry.k8s.io/pause:3.7
	I0916 11:13:36.014102 2203017 image.go:178] daemon lookup for registry.k8s.io/coredns/coredns:v1.8.6: Error response from daemon: No such image: registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:13:36.014212 2203017 image.go:178] daemon lookup for gcr.io/k8s-minikube/storage-provisioner:v5: Error response from daemon: No such image: gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:13:36.014447 2203017 image.go:178] daemon lookup for registry.k8s.io/kube-apiserver:v1.24.4: Error response from daemon: No such image: registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:13:36.327201 2203017 containerd.go:267] Checking existence of image with name "registry.k8s.io/kube-proxy:v1.24.4" and sha "bd8cc6d58247078a865774b7f516f8afc3ac8cd080fd49650ca30ef2fbc6ebd1"
	I0916 11:13:36.327277 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images ls name==registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:13:36.352649 2203017 cache_images.go:116] "registry.k8s.io/kube-proxy:v1.24.4" needs transfer: "registry.k8s.io/kube-proxy:v1.24.4" does not exist at hash "bd8cc6d58247078a865774b7f516f8afc3ac8cd080fd49650ca30ef2fbc6ebd1" in container runtime
	I0916 11:13:36.352697 2203017 cri.go:218] Removing image: registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:13:36.352744 2203017 ssh_runner.go:195] Run: which crictl
	I0916 11:13:36.356134 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:13:36.372558 2203017 containerd.go:267] Checking existence of image with name "registry.k8s.io/pause:3.7" and sha "e5a475a0380575fb5df454b2e32bdec93e1ec0094d8a61e895b41567cb884550"
	I0916 11:13:36.372691 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images ls name==registry.k8s.io/pause:3.7
	I0916 11:13:36.402753 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:13:36.407450 2203017 cache_images.go:116] "registry.k8s.io/pause:3.7" needs transfer: "registry.k8s.io/pause:3.7" does not exist at hash "e5a475a0380575fb5df454b2e32bdec93e1ec0094d8a61e895b41567cb884550" in container runtime
	I0916 11:13:36.407551 2203017 cri.go:218] Removing image: registry.k8s.io/pause:3.7
	I0916 11:13:36.407638 2203017 ssh_runner.go:195] Run: which crictl
	I0916 11:13:36.439153 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-proxy:v1.24.4
	I0916 11:13:36.439276 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/pause:3.7
	I0916 11:13:36.488611 2203017 containerd.go:267] Checking existence of image with name "registry.k8s.io/kube-controller-manager:v1.24.4" and sha "81a4a8a4ac639bdd7e118359417a80cab1a0d0e4737eb735714cf7f8b15dc0c7"
	I0916 11:13:36.488739 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images ls name==registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:13:36.490450 2203017 containerd.go:267] Checking existence of image with name "registry.k8s.io/etcd:3.5.3-0" and sha "a9a710bb96df080e6b9c720eb85dc5b832ff84abf77263548d74fedec6466a5a"
	I0916 11:13:36.490550 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images ls name==registry.k8s.io/etcd:3.5.3-0
	W0916 11:13:36.493640 2203017 image.go:283] image registry.k8s.io/coredns/coredns:v1.8.6 arch mismatch: want arm64 got amd64. fixing
	I0916 11:13:36.493783 2203017 containerd.go:267] Checking existence of image with name "registry.k8s.io/coredns/coredns:v1.8.6" and sha "6af7f860a8197bfa3fdb7dec2061aa33870253e87a1e91c492d55b8a4fd38d14"
	I0916 11:13:36.493840 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images ls name==registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:13:36.495238 2203017 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4
	I0916 11:13:36.495401 2203017 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/kube-proxy_v1.24.4
	I0916 11:13:36.508708 2203017 containerd.go:267] Checking existence of image with name "registry.k8s.io/kube-scheduler:v1.24.4" and sha "5753e4610b3ec0ac100c3535b8d8a7507b3d031148e168c2c3c4b0f389976074"
	I0916 11:13:36.508793 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images ls name==registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:13:36.510371 2203017 containerd.go:267] Checking existence of image with name "registry.k8s.io/kube-apiserver:v1.24.4" and sha "3767741e7fba72f328a8500a18ef34481343eb78697e31ae5bf3e390a28317ae"
	I0916 11:13:36.510447 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images ls name==registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:13:36.525326 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/pause:3.7
	I0916 11:13:36.560219 2203017 cache_images.go:116] "registry.k8s.io/kube-controller-manager:v1.24.4" needs transfer: "registry.k8s.io/kube-controller-manager:v1.24.4" does not exist at hash "81a4a8a4ac639bdd7e118359417a80cab1a0d0e4737eb735714cf7f8b15dc0c7" in container runtime
	I0916 11:13:36.560366 2203017 cache_images.go:116] "registry.k8s.io/kube-scheduler:v1.24.4" needs transfer: "registry.k8s.io/kube-scheduler:v1.24.4" does not exist at hash "5753e4610b3ec0ac100c3535b8d8a7507b3d031148e168c2c3c4b0f389976074" in container runtime
	I0916 11:13:36.560418 2203017 cri.go:218] Removing image: registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:13:36.560418 2203017 cri.go:218] Removing image: registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:13:36.560521 2203017 ssh_runner.go:195] Run: which crictl
	I0916 11:13:36.560545 2203017 ssh_runner.go:195] Run: which crictl
	I0916 11:13:36.560323 2203017 cache_images.go:116] "registry.k8s.io/coredns/coredns:v1.8.6" needs transfer: "registry.k8s.io/coredns/coredns:v1.8.6" does not exist at hash "6af7f860a8197bfa3fdb7dec2061aa33870253e87a1e91c492d55b8a4fd38d14" in container runtime
	I0916 11:13:36.560595 2203017 cri.go:218] Removing image: registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:13:36.560617 2203017 ssh_runner.go:195] Run: which crictl
	I0916 11:13:36.560360 2203017 ssh_runner.go:352] existence check for /var/lib/minikube/images/kube-proxy_v1.24.4: stat -c "%s %y" /var/lib/minikube/images/kube-proxy_v1.24.4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/kube-proxy_v1.24.4': No such file or directory
	I0916 11:13:36.560654 2203017 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4 --> /var/lib/minikube/images/kube-proxy_v1.24.4 (38148096 bytes)
	I0916 11:13:36.560297 2203017 cache_images.go:116] "registry.k8s.io/etcd:3.5.3-0" needs transfer: "registry.k8s.io/etcd:3.5.3-0" does not exist at hash "a9a710bb96df080e6b9c720eb85dc5b832ff84abf77263548d74fedec6466a5a" in container runtime
	I0916 11:13:36.560683 2203017 cri.go:218] Removing image: registry.k8s.io/etcd:3.5.3-0
	I0916 11:13:36.560707 2203017 ssh_runner.go:195] Run: which crictl
	I0916 11:13:36.609594 2203017 cache_images.go:116] "registry.k8s.io/kube-apiserver:v1.24.4" needs transfer: "registry.k8s.io/kube-apiserver:v1.24.4" does not exist at hash "3767741e7fba72f328a8500a18ef34481343eb78697e31ae5bf3e390a28317ae" in container runtime
	I0916 11:13:36.609681 2203017 cri.go:218] Removing image: registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:13:36.609760 2203017 ssh_runner.go:195] Run: which crictl
	I0916 11:13:36.617045 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/pause:3.7
	I0916 11:13:36.617137 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:13:36.617160 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/etcd:3.5.3-0
	I0916 11:13:36.617218 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:13:36.617243 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-scheduler:v1.24.4
	W0916 11:13:36.630296 2203017 ssh_runner.go:129] session error, resetting client: ssh: rejected: connect failed (open failed)
	I0916 11:13:36.630387 2203017 retry.go:31] will retry after 263.966136ms: ssh: rejected: connect failed (open failed)
	W0916 11:13:36.630416 2203017 ssh_runner.go:129] session error, resetting client: ssh: rejected: connect failed (open failed)
	I0916 11:13:36.630439 2203017 retry.go:31] will retry after 193.227753ms: ssh: rejected: connect failed (open failed)
	W0916 11:13:36.630481 2203017 ssh_runner.go:129] session error, resetting client: ssh: rejected: connect failed (open failed)
	I0916 11:13:36.630518 2203017 retry.go:31] will retry after 322.879975ms: ssh: rejected: connect failed (open failed)
	I0916 11:13:36.734376 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:13:36.734480 2203017 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-803604
	I0916 11:13:36.734711 2203017 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7
	I0916 11:13:36.734798 2203017 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/pause_3.7
	I0916 11:13:36.734861 2203017 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-803604
	I0916 11:13:36.735522 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:13:36.735585 2203017 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-803604
	I0916 11:13:36.784538 2203017 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40767 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/test-preload-803604/id_rsa Username:docker}
	I0916 11:13:36.800575 2203017 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40767 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/test-preload-803604/id_rsa Username:docker}
	I0916 11:13:36.823871 2203017 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" test-preload-803604
	I0916 11:13:36.826558 2203017 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40767 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/test-preload-803604/id_rsa Username:docker}
	I0916 11:13:36.885819 2203017 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40767 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/test-preload-803604/id_rsa Username:docker}
	I0916 11:13:36.926533 2203017 ssh_runner.go:352] existence check for /var/lib/minikube/images/pause_3.7: stat -c "%s %y" /var/lib/minikube/images/pause_3.7: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/pause_3.7': No such file or directory
	I0916 11:13:36.926612 2203017 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7 --> /var/lib/minikube/images/pause_3.7 (268288 bytes)
	W0916 11:13:37.043707 2203017 image.go:283] image gcr.io/k8s-minikube/storage-provisioner:v5 arch mismatch: want arm64 got amd64. fixing
	I0916 11:13:37.043919 2203017 containerd.go:267] Checking existence of image with name "gcr.io/k8s-minikube/storage-provisioner:v5" and sha "66749159455b3f08c8318fe0233122f54d0f5889f9c5fdfb73c3fd9d99895b51"
	I0916 11:13:37.044003 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images ls name==gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:13:37.120155 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/etcd:3.5.3-0
	I0916 11:13:37.120335 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-controller-manager:v1.24.4
	I0916 11:13:37.129139 2203017 containerd.go:285] Loading image: /var/lib/minikube/images/pause_3.7
	I0916 11:13:37.129254 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images import /var/lib/minikube/images/pause_3.7
	I0916 11:13:37.188475 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:13:37.196832 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:13:37.196947 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:13:37.197004 2203017 cache_images.go:116] "gcr.io/k8s-minikube/storage-provisioner:v5" needs transfer: "gcr.io/k8s-minikube/storage-provisioner:v5" does not exist at hash "66749159455b3f08c8318fe0233122f54d0f5889f9c5fdfb73c3fd9d99895b51" in container runtime
	I0916 11:13:37.197033 2203017 cri.go:218] Removing image: gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:13:37.197061 2203017 ssh_runner.go:195] Run: which crictl
	I0916 11:13:37.320396 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/etcd:3.5.3-0
	I0916 11:13:37.348252 2203017 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4
	I0916 11:13:37.348419 2203017 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/kube-controller-manager_v1.24.4
	I0916 11:13:37.348504 2203017 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/pause_3.7 from cache
	I0916 11:13:37.348549 2203017 containerd.go:285] Loading image: /var/lib/minikube/images/kube-proxy_v1.24.4
	I0916 11:13:37.348598 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images import /var/lib/minikube/images/kube-proxy_v1.24.4
	I0916 11:13:37.392538 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-apiserver:v1.24.4
	I0916 11:13:37.392657 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:13:37.392729 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/kube-scheduler:v1.24.4
	I0916 11:13:37.392802 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi registry.k8s.io/coredns/coredns:v1.8.6
	I0916 11:13:37.409893 2203017 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0
	I0916 11:13:37.410068 2203017 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/etcd_3.5.3-0
	I0916 11:13:38.761139 2203017 ssh_runner.go:235] Completed: stat -c "%s %y" /var/lib/minikube/images/kube-controller-manager_v1.24.4: (1.412687682s)
	I0916 11:13:38.761172 2203017 ssh_runner.go:352] existence check for /var/lib/minikube/images/kube-controller-manager_v1.24.4: stat -c "%s %y" /var/lib/minikube/images/kube-controller-manager_v1.24.4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/kube-controller-manager_v1.24.4': No such file or directory
	I0916 11:13:38.761202 2203017 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4 --> /var/lib/minikube/images/kube-controller-manager_v1.24.4 (28246528 bytes)
	I0916 11:13:38.761317 2203017 ssh_runner.go:235] Completed: sudo /usr/bin/crictl rmi registry.k8s.io/coredns/coredns:v1.8.6: (1.368499619s)
	I0916 11:13:38.761350 2203017 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6
	I0916 11:13:38.761437 2203017 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/coredns_v1.8.6
	I0916 11:13:38.761499 2203017 ssh_runner.go:235] Completed: sudo /usr/bin/crictl rmi registry.k8s.io/kube-apiserver:v1.24.4: (1.368934504s)
	I0916 11:13:38.761519 2203017 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4
	I0916 11:13:38.761567 2203017 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/kube-apiserver_v1.24.4
	I0916 11:13:38.761621 2203017 ssh_runner.go:235] Completed: sudo /usr/bin/crictl rmi gcr.io/k8s-minikube/storage-provisioner:v5: (1.368948601s)
	I0916 11:13:38.761680 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:13:38.761736 2203017 ssh_runner.go:235] Completed: sudo /usr/bin/crictl rmi registry.k8s.io/kube-scheduler:v1.24.4: (1.368992662s)
	I0916 11:13:38.761757 2203017 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4
	I0916 11:13:38.761802 2203017 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/kube-scheduler_v1.24.4
	I0916 11:13:38.761861 2203017 ssh_runner.go:235] Completed: stat -c "%s %y" /var/lib/minikube/images/etcd_3.5.3-0: (1.351762874s)
	I0916 11:13:38.761874 2203017 ssh_runner.go:352] existence check for /var/lib/minikube/images/etcd_3.5.3-0: stat -c "%s %y" /var/lib/minikube/images/etcd_3.5.3-0: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/etcd_3.5.3-0': No such file or directory
	I0916 11:13:38.761885 2203017 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0 --> /var/lib/minikube/images/etcd_3.5.3-0 (81117184 bytes)
	I0916 11:13:38.761094 2203017 ssh_runner.go:235] Completed: sudo ctr -n=k8s.io images import /var/lib/minikube/images/kube-proxy_v1.24.4: (1.41245753s)
	I0916 11:13:38.761937 2203017 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-proxy_v1.24.4 from cache
	I0916 11:13:38.861004 2203017 ssh_runner.go:352] existence check for /var/lib/minikube/images/coredns_v1.8.6: stat -c "%s %y" /var/lib/minikube/images/coredns_v1.8.6: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/coredns_v1.8.6': No such file or directory
	I0916 11:13:38.861050 2203017 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6 --> /var/lib/minikube/images/coredns_v1.8.6 (12318720 bytes)
	I0916 11:13:38.861290 2203017 ssh_runner.go:352] existence check for /var/lib/minikube/images/kube-apiserver_v1.24.4: stat -c "%s %y" /var/lib/minikube/images/kube-apiserver_v1.24.4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/kube-apiserver_v1.24.4': No such file or directory
	I0916 11:13:38.861315 2203017 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4 --> /var/lib/minikube/images/kube-apiserver_v1.24.4 (30873088 bytes)
	I0916 11:13:38.861406 2203017 ssh_runner.go:195] Run: sudo /usr/bin/crictl rmi gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:13:38.861450 2203017 ssh_runner.go:352] existence check for /var/lib/minikube/images/kube-scheduler_v1.24.4: stat -c "%s %y" /var/lib/minikube/images/kube-scheduler_v1.24.4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/kube-scheduler_v1.24.4': No such file or directory
	I0916 11:13:38.861465 2203017 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4 --> /var/lib/minikube/images/kube-scheduler_v1.24.4 (14094336 bytes)
	I0916 11:13:38.968759 2203017 cache_images.go:289] Loading image from: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5
	I0916 11:13:38.968934 2203017 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/images/storage-provisioner_v5
	I0916 11:13:39.036142 2203017 ssh_runner.go:352] existence check for /var/lib/minikube/images/storage-provisioner_v5: stat -c "%s %y" /var/lib/minikube/images/storage-provisioner_v5: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/images/storage-provisioner_v5': No such file or directory
	I0916 11:13:39.036235 2203017 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5 --> /var/lib/minikube/images/storage-provisioner_v5 (8035840 bytes)
	I0916 11:13:39.266002 2203017 containerd.go:285] Loading image: /var/lib/minikube/images/coredns_v1.8.6
	I0916 11:13:39.266293 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images import /var/lib/minikube/images/coredns_v1.8.6
	I0916 11:13:40.081076 2203017 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/coredns/coredns_v1.8.6 from cache
	I0916 11:13:40.081172 2203017 containerd.go:285] Loading image: /var/lib/minikube/images/storage-provisioner_v5
	I0916 11:13:40.081240 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images import /var/lib/minikube/images/storage-provisioner_v5
	I0916 11:13:40.421607 2203017 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/gcr.io/k8s-minikube/storage-provisioner_v5 from cache
	I0916 11:13:40.421657 2203017 containerd.go:285] Loading image: /var/lib/minikube/images/kube-scheduler_v1.24.4
	I0916 11:13:40.421727 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images import /var/lib/minikube/images/kube-scheduler_v1.24.4
	I0916 11:13:41.147966 2203017 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-scheduler_v1.24.4 from cache
	I0916 11:13:41.148065 2203017 containerd.go:285] Loading image: /var/lib/minikube/images/kube-controller-manager_v1.24.4
	I0916 11:13:41.148152 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images import /var/lib/minikube/images/kube-controller-manager_v1.24.4
	I0916 11:13:42.159406 2203017 ssh_runner.go:235] Completed: sudo ctr -n=k8s.io images import /var/lib/minikube/images/kube-controller-manager_v1.24.4: (1.011220122s)
	I0916 11:13:42.159511 2203017 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-controller-manager_v1.24.4 from cache
	I0916 11:13:42.159577 2203017 containerd.go:285] Loading image: /var/lib/minikube/images/kube-apiserver_v1.24.4
	I0916 11:13:42.159673 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images import /var/lib/minikube/images/kube-apiserver_v1.24.4
	I0916 11:13:43.259939 2203017 ssh_runner.go:235] Completed: sudo ctr -n=k8s.io images import /var/lib/minikube/images/kube-apiserver_v1.24.4: (1.100236466s)
	I0916 11:13:43.260019 2203017 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/kube-apiserver_v1.24.4 from cache
	I0916 11:13:43.260051 2203017 containerd.go:285] Loading image: /var/lib/minikube/images/etcd_3.5.3-0
	I0916 11:13:43.260105 2203017 ssh_runner.go:195] Run: sudo ctr -n=k8s.io images import /var/lib/minikube/images/etcd_3.5.3-0
	I0916 11:13:45.726486 2203017 ssh_runner.go:235] Completed: sudo ctr -n=k8s.io images import /var/lib/minikube/images/etcd_3.5.3-0: (2.466353124s)
	I0916 11:13:45.726512 2203017 cache_images.go:321] Transferred and loaded /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/images/arm64/registry.k8s.io/etcd_3.5.3-0 from cache
	I0916 11:13:45.726540 2203017 cache_images.go:123] Successfully loaded all cached images
	I0916 11:13:45.726546 2203017 cache_images.go:92] duration metric: took 9.719166052s to LoadCachedImages
	I0916 11:13:45.726555 2203017 kubeadm.go:934] updating node { 192.168.76.2 8443 v1.24.4 containerd true true} ...
	I0916 11:13:45.726654 2203017 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.24.4/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --container-runtime-endpoint=unix:///run/containerd/containerd.sock --hostname-override=test-preload-803604 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.76.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.24.4 ClusterName:test-preload-803604 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0916 11:13:45.726745 2203017 ssh_runner.go:195] Run: sudo crictl info
	I0916 11:13:45.769767 2203017 cni.go:84] Creating CNI manager for ""
	I0916 11:13:45.769794 2203017 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 11:13:45.769804 2203017 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0916 11:13:45.769824 2203017 kubeadm.go:181] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.76.2 APIServerPort:8443 KubernetesVersion:v1.24.4 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:test-preload-803604 NodeName:test-preload-803604 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.76.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.76.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt Static
PodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0916 11:13:45.769958 2203017 kubeadm.go:187] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.76.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "test-preload-803604"
	  kubeletExtraArgs:
	    node-ip: 192.168.76.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta3
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.76.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.24.4
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0916 11:13:45.770033 2203017 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.24.4
	I0916 11:13:45.779149 2203017 binaries.go:47] Didn't find k8s binaries: sudo ls /var/lib/minikube/binaries/v1.24.4: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/var/lib/minikube/binaries/v1.24.4': No such file or directory
	
	Initiating transfer...
	I0916 11:13:45.779213 2203017 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/binaries/v1.24.4
	I0916 11:13:45.790714 2203017 download.go:107] Downloading: https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubectl?checksum=file:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubectl.sha256 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/linux/arm64/v1.24.4/kubectl
	I0916 11:13:45.791152 2203017 download.go:107] Downloading: https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet?checksum=file:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet.sha256 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/linux/arm64/v1.24.4/kubelet
	I0916 11:13:45.791255 2203017 download.go:107] Downloading: https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubeadm?checksum=file:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubeadm.sha256 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/linux/arm64/v1.24.4/kubeadm
	I0916 11:13:46.374968 2203017 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.24.4/kubectl
	I0916 11:13:46.379670 2203017 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.24.4/kubectl: stat -c "%s %y" /var/lib/minikube/binaries/v1.24.4/kubectl: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.24.4/kubectl': No such file or directory
	I0916 11:13:46.379783 2203017 ssh_runner.go:362] scp /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/linux/arm64/v1.24.4/kubectl --> /var/lib/minikube/binaries/v1.24.4/kubectl (44564480 bytes)
	I0916 11:13:46.655695 2203017 out.go:201] 
	W0916 11:13:46.657589 2203017 out.go:270] X Exiting due to K8S_INSTALL_FAILED: Failed to update cluster: update primary control-plane node: downloading binaries: downloading kubelet: download failed: https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet?checksum=file:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet.sha256: getter: &{Ctx:context.Background Src:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet?checksum=file:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet.sha256 Dst:/home/jenkins/minikube-integration/19651-2057935/.minikube/cache/linux/arm64/v1.24.4/kubelet.download Pwd: Mode:2 Umask:---------- Detectors:[0x4772320 0x4772320 0x4772320 0x4772320 0x4772320 0x4772320 0x4772320] Decompressors:map[bz2:0x400051e090 gz:0x400051e098 tar:0x400051e040 tar.bz2:0x400051e050 tar.gz:0x400051e060 tar.xz:0x400051e070 tar.zst:0x400051e080 tbz2:0x400051e050 tgz:0x400051e060 txz:0x400051e070 tzst:0x400051e080 xz:0x400051e0a0 zip:0x400051e0b0 zst:0x400051e0a8] Getters:map[fil
e:0x4001958ca0 http:0x40006edb30 https:0x40006edb80] Dir:false ProgressListener:<nil> Insecure:false DisableSymlinks:false Options:[]}: stream error: stream ID 1; PROTOCOL_ERROR; received from peer
	X Exiting due to K8S_INSTALL_FAILED: Failed to update cluster: update primary control-plane node: downloading binaries: downloading kubelet: download failed: https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet?checksum=file:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet.sha256: getter: &{Ctx:context.Background Src:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet?checksum=file:https://dl.k8s.io/release/v1.24.4/bin/linux/arm64/kubelet.sha256 Dst:/home/jenkins/minikube-integration/19651-2057935/.minikube/cache/linux/arm64/v1.24.4/kubelet.download Pwd: Mode:2 Umask:---------- Detectors:[0x4772320 0x4772320 0x4772320 0x4772320 0x4772320 0x4772320 0x4772320] Decompressors:map[bz2:0x400051e090 gz:0x400051e098 tar:0x400051e040 tar.bz2:0x400051e050 tar.gz:0x400051e060 tar.xz:0x400051e070 tar.zst:0x400051e080 tbz2:0x400051e050 tgz:0x400051e060 txz:0x400051e070 tzst:0x400051e080 xz:0x400051e0a0 zip:0x400051e0b0 zst:0x400051e0a8] Getters:map[file:0x4001958ca0 http:0x40006edb30 https:0x4
0006edb80] Dir:false ProgressListener:<nil> Insecure:false DisableSymlinks:false Options:[]}: stream error: stream ID 1; PROTOCOL_ERROR; received from peer
	W0916 11:13:46.657608 2203017 out.go:270] * 
	* 
	W0916 11:13:46.658552 2203017 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0916 11:13:46.660702 2203017 out.go:201] 

                                                
                                                
** /stderr **
preload_test.go:46: out/minikube-linux-arm64 start -p test-preload-803604 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.24.4 failed: exit status 100
panic.go:629: *** TestPreload FAILED at 2024-09-16 11:13:46.696857577 +0000 UTC m=+2698.338257742
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestPreload]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect test-preload-803604
helpers_test.go:235: (dbg) docker inspect test-preload-803604:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "1f07ea91ab2eba214a5db0ab66b0487ee24575a8de72b0ba830674498b4584f8",
	        "Created": "2024-09-16T11:13:32.063395797Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2203416,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:13:32.354406292Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/1f07ea91ab2eba214a5db0ab66b0487ee24575a8de72b0ba830674498b4584f8/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/1f07ea91ab2eba214a5db0ab66b0487ee24575a8de72b0ba830674498b4584f8/hostname",
	        "HostsPath": "/var/lib/docker/containers/1f07ea91ab2eba214a5db0ab66b0487ee24575a8de72b0ba830674498b4584f8/hosts",
	        "LogPath": "/var/lib/docker/containers/1f07ea91ab2eba214a5db0ab66b0487ee24575a8de72b0ba830674498b4584f8/1f07ea91ab2eba214a5db0ab66b0487ee24575a8de72b0ba830674498b4584f8-json.log",
	        "Name": "/test-preload-803604",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "test-preload-803604:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "test-preload-803604",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/65a110e2b82956359e569a827c9f41c9a64648c33faabc81e2e650d9b9d452af-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/65a110e2b82956359e569a827c9f41c9a64648c33faabc81e2e650d9b9d452af/merged",
	                "UpperDir": "/var/lib/docker/overlay2/65a110e2b82956359e569a827c9f41c9a64648c33faabc81e2e650d9b9d452af/diff",
	                "WorkDir": "/var/lib/docker/overlay2/65a110e2b82956359e569a827c9f41c9a64648c33faabc81e2e650d9b9d452af/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "volume",
	                "Name": "test-preload-803604",
	                "Source": "/var/lib/docker/volumes/test-preload-803604/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            },
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            }
	        ],
	        "Config": {
	            "Hostname": "test-preload-803604",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "test-preload-803604",
	                "name.minikube.sigs.k8s.io": "test-preload-803604",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "ba30479b3c2770b8888b6a671856fcf7d2d0029fa1f6754a46a2136a1225a041",
	            "SandboxKey": "/var/run/docker/netns/ba30479b3c27",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40767"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40768"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40771"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40769"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40770"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "test-preload-803604": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.76.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:4c:02",
	                    "DriverOpts": null,
	                    "NetworkID": "9b8f1d14c18f2fc7ed0e6f8b9e4b40fe2f6a40fe51dc13a17b30c207a6e2a301",
	                    "EndpointID": "0316d381206e1b0d8739d90262e9a21a6b06b6c9e20dfb7d1c68f89b34e6c5b8",
	                    "Gateway": "192.168.76.1",
	                    "IPAddress": "192.168.76.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "test-preload-803604",
	                        "1f07ea91ab2e"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p test-preload-803604 -n test-preload-803604
helpers_test.go:239: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Host}} -p test-preload-803604 -n test-preload-803604: exit status 6 (306.98848ms)

                                                
                                                
-- stdout --
	Running
	WARNING: Your kubectl is pointing to stale minikube-vm.
	To fix the kubectl context, run `minikube update-context`

                                                
                                                
-- /stdout --
** stderr ** 
	E0916 11:13:47.014476 2204972 status.go:417] kubeconfig endpoint: get endpoint: "test-preload-803604" does not appear in /home/jenkins/minikube-integration/19651-2057935/kubeconfig

                                                
                                                
** /stderr **
helpers_test.go:239: status error: exit status 6 (may be ok)
helpers_test.go:241: "test-preload-803604" host is not running, skipping log retrieval (state="Running\nWARNING: Your kubectl is pointing to stale minikube-vm.\nTo fix the kubectl context, run `minikube update-context`")
helpers_test.go:175: Cleaning up "test-preload-803604" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p test-preload-803604
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p test-preload-803604: (1.939099495s)
--- FAIL: TestPreload (18.42s)

                                                
                                    
x
+
TestKubernetesUpgrade (346.35s)

                                                
                                                
=== RUN   TestKubernetesUpgrade
=== PAUSE TestKubernetesUpgrade

                                                
                                                

                                                
                                                
=== CONT  TestKubernetesUpgrade
version_upgrade_test.go:222: (dbg) Run:  out/minikube-linux-arm64 start -p kubernetes-upgrade-969540 --memory=2200 --kubernetes-version=v1.20.0 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:222: (dbg) Done: out/minikube-linux-arm64 start -p kubernetes-upgrade-969540 --memory=2200 --kubernetes-version=v1.20.0 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (56.151234444s)
version_upgrade_test.go:227: (dbg) Run:  out/minikube-linux-arm64 stop -p kubernetes-upgrade-969540
version_upgrade_test.go:227: (dbg) Done: out/minikube-linux-arm64 stop -p kubernetes-upgrade-969540: (1.32487842s)
version_upgrade_test.go:232: (dbg) Run:  out/minikube-linux-arm64 -p kubernetes-upgrade-969540 status --format={{.Host}}
version_upgrade_test.go:232: (dbg) Non-zero exit: out/minikube-linux-arm64 -p kubernetes-upgrade-969540 status --format={{.Host}}: exit status 7 (67.38191ms)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
version_upgrade_test.go:234: status error: exit status 7 (may be ok)
version_upgrade_test.go:243: (dbg) Run:  out/minikube-linux-arm64 start -p kubernetes-upgrade-969540 --memory=2200 --kubernetes-version=v1.31.1 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
E0916 11:19:07.672068 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
version_upgrade_test.go:243: (dbg) Done: out/minikube-linux-arm64 start -p kubernetes-upgrade-969540 --memory=2200 --kubernetes-version=v1.31.1 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (4m42.634318484s)
version_upgrade_test.go:248: (dbg) Run:  kubectl --context kubernetes-upgrade-969540 version --output=json
version_upgrade_test.go:248: (dbg) Non-zero exit: kubectl --context kubernetes-upgrade-969540 version --output=json: fork/exec /usr/local/bin/kubectl: exec format error (2.602808ms)
version_upgrade_test.go:250: error running kubectl: fork/exec /usr/local/bin/kubectl: exec format error
panic.go:629: *** TestKubernetesUpgrade FAILED at 2024-09-16 11:23:26.074949343 +0000 UTC m=+3277.716349500
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestKubernetesUpgrade]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect kubernetes-upgrade-969540
helpers_test.go:235: (dbg) docker inspect kubernetes-upgrade-969540:

                                                
                                                
-- stdout --
	[
	    {
	        "Id": "c966ed2029916eee4e26e00963bf4b589047578204eca9252fca9bd6b80797ee",
	        "Created": "2024-09-16T11:17:54.122903916Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 2228673,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2024-09-16T11:18:43.959065282Z",
	            "FinishedAt": "2024-09-16T11:18:42.603960482Z"
	        },
	        "Image": "sha256:a1b71fa87733590eb4674b16f6945626ae533f3af37066893e3fd70eb9476268",
	        "ResolvConfPath": "/var/lib/docker/containers/c966ed2029916eee4e26e00963bf4b589047578204eca9252fca9bd6b80797ee/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/c966ed2029916eee4e26e00963bf4b589047578204eca9252fca9bd6b80797ee/hostname",
	        "HostsPath": "/var/lib/docker/containers/c966ed2029916eee4e26e00963bf4b589047578204eca9252fca9bd6b80797ee/hosts",
	        "LogPath": "/var/lib/docker/containers/c966ed2029916eee4e26e00963bf4b589047578204eca9252fca9bd6b80797ee/c966ed2029916eee4e26e00963bf4b589047578204eca9252fca9bd6b80797ee-json.log",
	        "Name": "/kubernetes-upgrade-969540",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "kubernetes-upgrade-969540:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "kubernetes-upgrade-969540",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "LowerDir": "/var/lib/docker/overlay2/20f09a555f366352bbba0ccad3e6274917dea0c235f52d10a3ab8c9adab0ff36-init/diff:/var/lib/docker/overlay2/79d379de88236d5570ffcb2b7890b1e2e3a492a5a6d74ff9f8442d13070bfef6/diff",
	                "MergedDir": "/var/lib/docker/overlay2/20f09a555f366352bbba0ccad3e6274917dea0c235f52d10a3ab8c9adab0ff36/merged",
	                "UpperDir": "/var/lib/docker/overlay2/20f09a555f366352bbba0ccad3e6274917dea0c235f52d10a3ab8c9adab0ff36/diff",
	                "WorkDir": "/var/lib/docker/overlay2/20f09a555f366352bbba0ccad3e6274917dea0c235f52d10a3ab8c9adab0ff36/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "kubernetes-upgrade-969540",
	                "Source": "/var/lib/docker/volumes/kubernetes-upgrade-969540/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "kubernetes-upgrade-969540",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "kubernetes-upgrade-969540",
	                "name.minikube.sigs.k8s.io": "kubernetes-upgrade-969540",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "ce90ab0c4bc0ba4e03902abc7aea064487c191348176b7124c0e47586bcae9de",
	            "SandboxKey": "/var/run/docker/netns/ce90ab0c4bc0",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40817"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40818"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40821"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40819"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "40820"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "kubernetes-upgrade-969540": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.76.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "02:42:c0:a8:4c:02",
	                    "DriverOpts": null,
	                    "NetworkID": "2434c05a1d9fcb4044f0e35f0932c3bc873520288d0202c94efac7f3191f1488",
	                    "EndpointID": "cee7486d973d2e0d2545fe61981f6f06bf9d9db992e1f4ebe120e2d2a75fd542",
	                    "Gateway": "192.168.76.1",
	                    "IPAddress": "192.168.76.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "kubernetes-upgrade-969540",
	                        "c966ed202991"
	                    ]
	                }
	            }
	        }
	    }
	]

                                                
                                                
-- /stdout --
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p kubernetes-upgrade-969540 -n kubernetes-upgrade-969540
helpers_test.go:244: <<< TestKubernetesUpgrade FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestKubernetesUpgrade]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p kubernetes-upgrade-969540 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p kubernetes-upgrade-969540 logs -n 25: (2.828988909s)
helpers_test.go:252: TestKubernetesUpgrade logs: 
-- stdout --
	
	==> Audit <==
	|---------|------------------------------------------------------|--------------------------|---------|---------|---------------------|---------------------|
	| Command |                         Args                         |         Profile          |  User   | Version |     Start Time      |      End Time       |
	|---------|------------------------------------------------------|--------------------------|---------|---------|---------------------|---------------------|
	| ssh     | -p cilium-430967 sudo                                | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl status kubelet --all                       |                          |         |         |                     |                     |
	|         | --full --no-pager                                    |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl cat kubelet                                |                          |         |         |                     |                     |
	|         | --no-pager                                           |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | journalctl -xeu kubelet --all                        |                          |         |         |                     |                     |
	|         | --full --no-pager                                    |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo cat                            | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /etc/kubernetes/kubelet.conf                         |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo cat                            | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /var/lib/kubelet/config.yaml                         |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl status docker --all                        |                          |         |         |                     |                     |
	|         | --full --no-pager                                    |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl cat docker                                 |                          |         |         |                     |                     |
	|         | --no-pager                                           |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo cat                            | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /etc/docker/daemon.json                              |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo docker                         | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | system info                                          |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl status cri-docker                          |                          |         |         |                     |                     |
	|         | --all --full --no-pager                              |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl cat cri-docker                             |                          |         |         |                     |                     |
	|         | --no-pager                                           |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo cat                            | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /etc/systemd/system/cri-docker.service.d/10-cni.conf |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo cat                            | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /usr/lib/systemd/system/cri-docker.service           |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | cri-dockerd --version                                |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl status containerd                          |                          |         |         |                     |                     |
	|         | --all --full --no-pager                              |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl cat containerd                             |                          |         |         |                     |                     |
	|         | --no-pager                                           |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo cat                            | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /lib/systemd/system/containerd.service               |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo cat                            | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /etc/containerd/config.toml                          |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | containerd config dump                               |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl status crio --all                          |                          |         |         |                     |                     |
	|         | --full --no-pager                                    |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo                                | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | systemctl cat crio --no-pager                        |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo find                           | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | /etc/crio -type f -exec sh -c                        |                          |         |         |                     |                     |
	|         | 'echo {}; cat {}' \;                                 |                          |         |         |                     |                     |
	| ssh     | -p cilium-430967 sudo crio                           | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | config                                               |                          |         |         |                     |                     |
	| delete  | -p cilium-430967                                     | cilium-430967            | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC | 16 Sep 24 11:23 UTC |
	| start   | -p force-systemd-env-836951                          | force-systemd-env-836951 | jenkins | v1.34.0 | 16 Sep 24 11:23 UTC |                     |
	|         | --memory=2048                                        |                          |         |         |                     |                     |
	|         | --alsologtostderr                                    |                          |         |         |                     |                     |
	|         | -v=5 --driver=docker                                 |                          |         |         |                     |                     |
	|         | --container-runtime=containerd                       |                          |         |         |                     |                     |
	|---------|------------------------------------------------------|--------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 11:23:20
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 11:23:20.348786 2250592 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:23:20.349028 2250592 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:23:20.349055 2250592 out.go:358] Setting ErrFile to fd 2...
	I0916 11:23:20.349072 2250592 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:23:20.349355 2250592 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 11:23:20.349806 2250592 out.go:352] Setting JSON to false
	I0916 11:23:20.350979 2250592 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":140743,"bootTime":1726345058,"procs":221,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 11:23:20.354740 2250592 start.go:139] virtualization:  
	I0916 11:23:20.358272 2250592 out.go:177] * [force-systemd-env-836951] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 11:23:20.361778 2250592 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 11:23:20.361848 2250592 notify.go:220] Checking for updates...
	I0916 11:23:20.367431 2250592 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 11:23:20.370111 2250592 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:23:20.372832 2250592 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 11:23:20.375488 2250592 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 11:23:20.377925 2250592 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=true
	I0916 11:23:20.381014 2250592 config.go:182] Loaded profile config "kubernetes-upgrade-969540": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:23:20.381175 2250592 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 11:23:20.428130 2250592 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 11:23:20.428269 2250592 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:23:20.574416 2250592 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:55 OomKillDisable:true NGoroutines:73 SystemTime:2024-09-16 11:23:20.559742402 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:23:20.574539 2250592 docker.go:318] overlay module found
	I0916 11:23:20.577445 2250592 out.go:177] * Using the docker driver based on user configuration
	I0916 11:23:20.580119 2250592 start.go:297] selected driver: docker
	I0916 11:23:20.580149 2250592 start.go:901] validating driver "docker" against <nil>
	I0916 11:23:20.580163 2250592 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 11:23:20.580868 2250592 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:23:20.681179 2250592 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:55 OomKillDisable:true NGoroutines:73 SystemTime:2024-09-16 11:23:20.669262942 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:23:20.681415 2250592 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 11:23:20.681692 2250592 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0916 11:23:20.684301 2250592 out.go:177] * Using Docker driver with root privileges
	I0916 11:23:20.686991 2250592 cni.go:84] Creating CNI manager for ""
	I0916 11:23:20.687068 2250592 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 11:23:20.687077 2250592 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 11:23:20.687172 2250592 start.go:340] cluster config:
	{Name:force-systemd-env-836951 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2048 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:force-systemd-env-836951 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.
local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}

                                                
                                                
	I0916 11:23:20.691695 2250592 out.go:177] * Starting "force-systemd-env-836951" primary control-plane node in "force-systemd-env-836951" cluster
	I0916 11:23:20.694513 2250592 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 11:23:20.697003 2250592 out.go:177] * Pulling base image v0.0.45-1726358845-19644 ...
	I0916 11:23:20.700163 2250592 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:23:20.700225 2250592 preload.go:146] Found local preload: /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4
	I0916 11:23:20.700243 2250592 cache.go:56] Caching tarball of preloaded images
	I0916 11:23:20.700325 2250592 preload.go:172] Found /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0916 11:23:20.700335 2250592 cache.go:59] Finished verifying existence of preloaded tar for v1.31.1 on containerd
	I0916 11:23:20.700450 2250592 profile.go:143] Saving config to /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/force-systemd-env-836951/config.json ...
	I0916 11:23:20.700473 2250592 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/force-systemd-env-836951/config.json: {Name:mk0bc16f80e7c6c83ef66be19eed8a44870deb11 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:23:20.700629 2250592 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	W0916 11:23:20.736865 2250592 image.go:95] image gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 is of wrong architecture
	I0916 11:23:20.736883 2250592 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 11:23:20.736955 2250592 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 11:23:20.736973 2250592 image.go:66] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory, skipping pull
	I0916 11:23:20.736979 2250592 image.go:135] gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 exists in cache, skipping pull
	I0916 11:23:20.736986 2250592 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	I0916 11:23:20.736991 2250592 cache.go:162] Loading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from local cache
	I0916 11:23:20.738570 2250592 image.go:273] response: 
	I0916 11:23:20.865576 2250592 cache.go:164] successfully loaded and using gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 from cached tarball
	I0916 11:23:20.865635 2250592 cache.go:194] Successfully downloaded all kic artifacts
	I0916 11:23:20.865666 2250592 start.go:360] acquireMachinesLock for force-systemd-env-836951: {Name:mk67caef7de2f435bd151309210b0561424f4340 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0916 11:23:20.865800 2250592 start.go:364] duration metric: took 116.118µs to acquireMachinesLock for "force-systemd-env-836951"
	I0916 11:23:20.865833 2250592 start.go:93] Provisioning new machine with config: &{Name:force-systemd-env-836951 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2048 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:force-systemd-env-836951 Namespace:default APIServerHA
VIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: S
ocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 11:23:20.865909 2250592 start.go:125] createHost starting for "" (driver="docker")
	I0916 11:23:22.760459 2228471 kubeadm.go:310] [api-check] The API server is healthy after 9.513387121s
	I0916 11:23:22.785562 2228471 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0916 11:23:22.803637 2228471 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0916 11:23:22.846331 2228471 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0916 11:23:22.846825 2228471 kubeadm.go:310] [mark-control-plane] Marking the node kubernetes-upgrade-969540 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0916 11:23:22.860035 2228471 kubeadm.go:310] [bootstrap-token] Using token: 2d0tq3.4hk3fa6xe4j2bhyl
	I0916 11:23:22.861862 2228471 out.go:235]   - Configuring RBAC rules ...
	I0916 11:23:22.861982 2228471 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0916 11:23:22.871628 2228471 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0916 11:23:22.884007 2228471 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0916 11:23:22.889564 2228471 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0916 11:23:22.894560 2228471 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0916 11:23:22.899481 2228471 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0916 11:23:23.169031 2228471 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0916 11:23:23.603017 2228471 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0916 11:23:24.169663 2228471 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0916 11:23:24.172654 2228471 kubeadm.go:310] 
	I0916 11:23:24.172744 2228471 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0916 11:23:24.172755 2228471 kubeadm.go:310] 
	I0916 11:23:24.172831 2228471 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0916 11:23:24.172840 2228471 kubeadm.go:310] 
	I0916 11:23:24.172865 2228471 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0916 11:23:24.173269 2228471 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0916 11:23:24.173336 2228471 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0916 11:23:24.173347 2228471 kubeadm.go:310] 
	I0916 11:23:24.173400 2228471 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0916 11:23:24.173409 2228471 kubeadm.go:310] 
	I0916 11:23:24.173456 2228471 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0916 11:23:24.173463 2228471 kubeadm.go:310] 
	I0916 11:23:24.173514 2228471 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0916 11:23:24.173591 2228471 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0916 11:23:24.173663 2228471 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0916 11:23:24.173671 2228471 kubeadm.go:310] 
	I0916 11:23:24.173919 2228471 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0916 11:23:24.174005 2228471 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0916 11:23:24.174021 2228471 kubeadm.go:310] 
	I0916 11:23:24.174273 2228471 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 2d0tq3.4hk3fa6xe4j2bhyl \
	I0916 11:23:24.174383 2228471 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e \
	I0916 11:23:24.174544 2228471 kubeadm.go:310] 	--control-plane 
	I0916 11:23:24.174560 2228471 kubeadm.go:310] 
	I0916 11:23:24.174829 2228471 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0916 11:23:24.174843 2228471 kubeadm.go:310] 
	I0916 11:23:24.175094 2228471 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 2d0tq3.4hk3fa6xe4j2bhyl \
	I0916 11:23:24.175346 2228471 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:0f7862f23ce3bdf9eed20de4f2ae19a4017495bf6022b60bebe68074c7889e3e 
	I0916 11:23:24.185999 2228471 kubeadm.go:310] W0916 11:23:08.317109    5785 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "ClusterConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:23:24.186302 2228471 kubeadm.go:310] W0916 11:23:08.318198    5785 common.go:101] your configuration file uses a deprecated API spec: "kubeadm.k8s.io/v1beta3" (kind: "InitConfiguration"). Please use 'kubeadm config migrate --old-config old.yaml --new-config new.yaml', which will write the new, similar spec using a newer API version.
	I0916 11:23:24.186535 2228471 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1069-aws\n", err: exit status 1
	I0916 11:23:24.186642 2228471 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0916 11:23:24.186662 2228471 cni.go:84] Creating CNI manager for ""
	I0916 11:23:24.186688 2228471 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 11:23:24.189368 2228471 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0916 11:23:24.192151 2228471 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0916 11:23:24.196947 2228471 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.31.1/kubectl ...
	I0916 11:23:24.196979 2228471 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0916 11:23:24.253621 2228471 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0916 11:23:24.709441 2228471 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0916 11:23:24.709580 2228471 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0916 11:23:24.709659 2228471 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.31.1/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes kubernetes-upgrade-969540 minikube.k8s.io/updated_at=2024_09_16T11_23_24_0700 minikube.k8s.io/version=v1.34.0 minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed minikube.k8s.io/name=kubernetes-upgrade-969540 minikube.k8s.io/primary=true
	I0916 11:23:25.060624 2228471 ops.go:34] apiserver oom_adj: -16
	I0916 11:23:25.060661 2228471 kubeadm.go:1113] duration metric: took 351.135078ms to wait for elevateKubeSystemPrivileges
	I0916 11:23:25.060676 2228471 kubeadm.go:394] duration metric: took 4m26.191524598s to StartCluster
	I0916 11:23:25.060693 2228471 settings.go:142] acquiring lock: {Name:mk64db776ff04afd364f1babc44c076f59eaa055 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:23:25.060768 2228471 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 11:23:25.061462 2228471 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/19651-2057935/kubeconfig: {Name:mk1e33cd753c7230e8280e70541d55b1f33e20c8 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0916 11:23:25.061676 2228471 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0916 11:23:25.061968 2228471 config.go:182] Loaded profile config "kubernetes-upgrade-969540": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:23:25.062027 2228471 addons.go:507] enable addons start: toEnable=map[ambassador:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false helm-tiller:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0916 11:23:25.062089 2228471 addons.go:69] Setting storage-provisioner=true in profile "kubernetes-upgrade-969540"
	I0916 11:23:25.062105 2228471 addons.go:234] Setting addon storage-provisioner=true in "kubernetes-upgrade-969540"
	W0916 11:23:25.062111 2228471 addons.go:243] addon storage-provisioner should already be in state true
	I0916 11:23:25.062159 2228471 host.go:66] Checking if "kubernetes-upgrade-969540" exists ...
	I0916 11:23:25.062659 2228471 cli_runner.go:164] Run: docker container inspect kubernetes-upgrade-969540 --format={{.State.Status}}
	I0916 11:23:25.063122 2228471 addons.go:69] Setting default-storageclass=true in profile "kubernetes-upgrade-969540"
	I0916 11:23:25.063151 2228471 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "kubernetes-upgrade-969540"
	I0916 11:23:25.063489 2228471 cli_runner.go:164] Run: docker container inspect kubernetes-upgrade-969540 --format={{.State.Status}}
	I0916 11:23:25.065601 2228471 out.go:177] * Verifying Kubernetes components...
	I0916 11:23:25.071828 2228471 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0916 11:23:25.089621 2228471 kapi.go:59] client config for kubernetes-upgrade-969540: &rest.Config{Host:"https://192.168.76.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kubernetes-upgrade-969540/client.crt", KeyFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kubernetes-upgrade-969540/client.key", CAFile:"/home/jenkins/minikube-integration/19651-2057935/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8
(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1a1e6c0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0916 11:23:25.089961 2228471 addons.go:234] Setting addon default-storageclass=true in "kubernetes-upgrade-969540"
	W0916 11:23:25.089981 2228471 addons.go:243] addon default-storageclass should already be in state true
	I0916 11:23:25.090007 2228471 host.go:66] Checking if "kubernetes-upgrade-969540" exists ...
	I0916 11:23:25.090464 2228471 cli_runner.go:164] Run: docker container inspect kubernetes-upgrade-969540 --format={{.State.Status}}
	I0916 11:23:25.120851 2228471 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0916 11:23:20.871010 2250592 out.go:235] * Creating docker container (CPUs=2, Memory=2048MB) ...
	I0916 11:23:20.871277 2250592 start.go:159] libmachine.API.Create for "force-systemd-env-836951" (driver="docker")
	I0916 11:23:20.871307 2250592 client.go:168] LocalClient.Create starting
	I0916 11:23:20.871380 2250592 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/ca.pem
	I0916 11:23:20.871411 2250592 main.go:141] libmachine: Decoding PEM data...
	I0916 11:23:20.871428 2250592 main.go:141] libmachine: Parsing certificate...
	I0916 11:23:20.871479 2250592 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/19651-2057935/.minikube/certs/cert.pem
	I0916 11:23:20.871499 2250592 main.go:141] libmachine: Decoding PEM data...
	I0916 11:23:20.871509 2250592 main.go:141] libmachine: Parsing certificate...
	I0916 11:23:20.871880 2250592 cli_runner.go:164] Run: docker network inspect force-systemd-env-836951 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0916 11:23:20.892102 2250592 cli_runner.go:211] docker network inspect force-systemd-env-836951 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0916 11:23:20.892190 2250592 network_create.go:284] running [docker network inspect force-systemd-env-836951] to gather additional debugging logs...
	I0916 11:23:20.892214 2250592 cli_runner.go:164] Run: docker network inspect force-systemd-env-836951
	W0916 11:23:20.907846 2250592 cli_runner.go:211] docker network inspect force-systemd-env-836951 returned with exit code 1
	I0916 11:23:20.907881 2250592 network_create.go:287] error running [docker network inspect force-systemd-env-836951]: docker network inspect force-systemd-env-836951: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network force-systemd-env-836951 not found
	I0916 11:23:20.907894 2250592 network_create.go:289] output of [docker network inspect force-systemd-env-836951]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network force-systemd-env-836951 not found
	
	** /stderr **
	I0916 11:23:20.908012 2250592 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0916 11:23:20.927016 2250592 network.go:211] skipping subnet 192.168.49.0/24 that is taken: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName:br-941929ec13d1 IfaceIPv4:192.168.49.1 IfaceMTU:1500 IfaceMAC:02:42:32:84:fe:19} reservation:<nil>}
	I0916 11:23:20.927512 2250592 network.go:211] skipping subnet 192.168.58.0/24 that is taken: &{IP:192.168.58.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.58.0/24 Gateway:192.168.58.1 ClientMin:192.168.58.2 ClientMax:192.168.58.254 Broadcast:192.168.58.255 IsPrivate:true Interface:{IfaceName:br-b138f637362d IfaceIPv4:192.168.58.1 IfaceMTU:1500 IfaceMAC:02:42:81:42:5c:08} reservation:<nil>}
	I0916 11:23:20.928174 2250592 network.go:211] skipping subnet 192.168.67.0/24 that is taken: &{IP:192.168.67.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.67.0/24 Gateway:192.168.67.1 ClientMin:192.168.67.2 ClientMax:192.168.67.254 Broadcast:192.168.67.255 IsPrivate:true Interface:{IfaceName:br-c7e139d3d7f3 IfaceIPv4:192.168.67.1 IfaceMTU:1500 IfaceMAC:02:42:92:83:9d:9a} reservation:<nil>}
	I0916 11:23:20.928693 2250592 network.go:211] skipping subnet 192.168.76.0/24 that is taken: &{IP:192.168.76.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.76.0/24 Gateway:192.168.76.1 ClientMin:192.168.76.2 ClientMax:192.168.76.254 Broadcast:192.168.76.255 IsPrivate:true Interface:{IfaceName:br-2434c05a1d9f IfaceIPv4:192.168.76.1 IfaceMTU:1500 IfaceMAC:02:42:d1:fd:66:82} reservation:<nil>}
	I0916 11:23:20.929161 2250592 network.go:211] skipping subnet 192.168.85.0/24 that is taken: &{IP:192.168.85.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.85.0/24 Gateway:192.168.85.1 ClientMin:192.168.85.2 ClientMax:192.168.85.254 Broadcast:192.168.85.255 IsPrivate:true Interface:{IfaceName:br-82d0b324cf4f IfaceIPv4:192.168.85.1 IfaceMTU:1500 IfaceMAC:02:42:d6:d4:2a:58} reservation:<nil>}
	I0916 11:23:20.929716 2250592 network.go:211] skipping subnet 192.168.94.0/24 that is taken: &{IP:192.168.94.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.94.0/24 Gateway:192.168.94.1 ClientMin:192.168.94.2 ClientMax:192.168.94.254 Broadcast:192.168.94.255 IsPrivate:true Interface:{IfaceName:br-66822a5d117b IfaceIPv4:192.168.94.1 IfaceMTU:1500 IfaceMAC:02:42:cf:08:74:bd} reservation:<nil>}
	I0916 11:23:20.930552 2250592 network.go:206] using free private subnet 192.168.103.0/24: &{IP:192.168.103.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.103.0/24 Gateway:192.168.103.1 ClientMin:192.168.103.2 ClientMax:192.168.103.254 Broadcast:192.168.103.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x40016fdb10}
	I0916 11:23:20.930622 2250592 network_create.go:124] attempt to create docker network force-systemd-env-836951 192.168.103.0/24 with gateway 192.168.103.1 and MTU of 1500 ...
	I0916 11:23:20.930732 2250592 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.103.0/24 --gateway=192.168.103.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=force-systemd-env-836951 force-systemd-env-836951
	I0916 11:23:21.046480 2250592 network_create.go:108] docker network force-systemd-env-836951 192.168.103.0/24 created
	I0916 11:23:21.046514 2250592 kic.go:121] calculated static IP "192.168.103.2" for the "force-systemd-env-836951" container
	I0916 11:23:21.046590 2250592 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0916 11:23:21.078609 2250592 cli_runner.go:164] Run: docker volume create force-systemd-env-836951 --label name.minikube.sigs.k8s.io=force-systemd-env-836951 --label created_by.minikube.sigs.k8s.io=true
	I0916 11:23:21.125005 2250592 oci.go:103] Successfully created a docker volume force-systemd-env-836951
	I0916 11:23:21.125094 2250592 cli_runner.go:164] Run: docker run --rm --name force-systemd-env-836951-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=force-systemd-env-836951 --entrypoint /usr/bin/test -v force-systemd-env-836951:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -d /var/lib
	I0916 11:23:21.899794 2250592 oci.go:107] Successfully prepared a docker volume force-systemd-env-836951
	I0916 11:23:21.899846 2250592 preload.go:131] Checking if preload exists for k8s version v1.31.1 and runtime containerd
	I0916 11:23:21.899866 2250592 kic.go:194] Starting extracting preloaded images to volume ...
	I0916 11:23:21.899943 2250592 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.31.1-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v force-systemd-env-836951:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 -I lz4 -xf /preloaded.tar -C /extractDir
	I0916 11:23:25.125425 2228471 addons.go:431] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:23:25.125455 2228471 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0916 11:23:25.125518 2228471 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" kubernetes-upgrade-969540
	I0916 11:23:25.129769 2228471 addons.go:431] installing /etc/kubernetes/addons/storageclass.yaml
	I0916 11:23:25.129818 2228471 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0916 11:23:25.129898 2228471 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" kubernetes-upgrade-969540
	I0916 11:23:25.163798 2228471 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40817 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/kubernetes-upgrade-969540/id_rsa Username:docker}
	I0916 11:23:25.180592 2228471 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40817 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/kubernetes-upgrade-969540/id_rsa Username:docker}
	I0916 11:23:25.279023 2228471 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0916 11:23:25.299932 2228471 api_server.go:52] waiting for apiserver process to appear ...
	I0916 11:23:25.300019 2228471 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:23:25.313258 2228471 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0916 11:23:25.317182 2228471 api_server.go:72] duration metric: took 255.469809ms to wait for apiserver process to appear ...
	I0916 11:23:25.317253 2228471 api_server.go:88] waiting for apiserver healthz status ...
	I0916 11:23:25.317290 2228471 api_server.go:253] Checking apiserver healthz at https://192.168.76.2:8443/healthz ...
	I0916 11:23:25.327691 2228471 api_server.go:279] https://192.168.76.2:8443/healthz returned 200:
	ok
	I0916 11:23:25.341834 2228471 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.31.1/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0916 11:23:25.345386 2228471 api_server.go:141] control plane version: v1.31.1
	I0916 11:23:25.345458 2228471 api_server.go:131] duration metric: took 28.184215ms to wait for apiserver health ...
	I0916 11:23:25.345485 2228471 system_pods.go:43] waiting for kube-system pods to appear ...
	I0916 11:23:25.345577 2228471 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0916 11:23:25.345609 2228471 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0916 11:23:25.356734 2228471 system_pods.go:59] 4 kube-system pods found
	I0916 11:23:25.356831 2228471 system_pods.go:61] "etcd-kubernetes-upgrade-969540" [095f9cb8-c47b-4cd7-b2d2-b8c3b62286ec] Running / Ready:ContainersNotReady (containers with unready status: [etcd]) / ContainersReady:ContainersNotReady (containers with unready status: [etcd])
	I0916 11:23:25.356861 2228471 system_pods.go:61] "kube-apiserver-kubernetes-upgrade-969540" [81eb85a0-8676-424e-9d69-f7f664c96fc8] Running / Ready:ContainersNotReady (containers with unready status: [kube-apiserver]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-apiserver])
	I0916 11:23:25.356881 2228471 system_pods.go:61] "kube-controller-manager-kubernetes-upgrade-969540" [6d6634c8-4b28-40a3-963a-0f3cf2903cda] Running / Ready:ContainersNotReady (containers with unready status: [kube-controller-manager]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-controller-manager])
	I0916 11:23:25.356901 2228471 system_pods.go:61] "kube-scheduler-kubernetes-upgrade-969540" [37cade22-b8ed-4478-938e-cfdd6c79b908] Running / Ready:ContainersNotReady (containers with unready status: [kube-scheduler]) / ContainersReady:ContainersNotReady (containers with unready status: [kube-scheduler])
	I0916 11:23:25.356935 2228471 system_pods.go:74] duration metric: took 11.418551ms to wait for pod list to return data ...
	I0916 11:23:25.356964 2228471 kubeadm.go:582] duration metric: took 295.262432ms to wait for: map[apiserver:true system_pods:true]
	I0916 11:23:25.356988 2228471 node_conditions.go:102] verifying NodePressure condition ...
	I0916 11:23:25.363669 2228471 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0916 11:23:25.363749 2228471 node_conditions.go:123] node cpu capacity is 2
	I0916 11:23:25.363775 2228471 node_conditions.go:105] duration metric: took 6.769528ms to run NodePressure ...
	I0916 11:23:25.363801 2228471 start.go:241] waiting for startup goroutines ...
	I0916 11:23:25.976451 2228471 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0916 11:23:26.008876 2228471 addons.go:510] duration metric: took 946.78486ms for enable addons: enabled=[storage-provisioner default-storageclass]
	I0916 11:23:26.008935 2228471 start.go:246] waiting for cluster config update ...
	I0916 11:23:26.008950 2228471 start.go:255] writing updated cluster config ...
	I0916 11:23:26.010540 2228471 ssh_runner.go:195] Run: rm -f paused
	I0916 11:23:26.027153 2228471 out.go:177] * Done! kubectl is now configured to use "kubernetes-upgrade-969540" cluster and "default" namespace by default
	E0916 11:23:26.038186 2228471 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	dbf1d1255f76e       27e3830e14027       14 seconds ago      Running             etcd                      0                   ffd95ff35017a       etcd-kubernetes-upgrade-969540
	0be2078071a35       279f381cb3736       14 seconds ago      Running             kube-controller-manager   5                   de8678b314329       kube-controller-manager-kubernetes-upgrade-969540
	1354cc54cf325       7f8aa378bb47d       14 seconds ago      Running             kube-scheduler            1                   184790ba946c1       kube-scheduler-kubernetes-upgrade-969540
	871a9eb6aa1fd       d3f53a98c0a9d       14 seconds ago      Running             kube-apiserver            5                   ff1640c2e1f67       kube-apiserver-kubernetes-upgrade-969540
	
	
	==> containerd <==
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.540851786Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.579540703Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.579647123Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.579667784Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.579761207Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.676892764Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:kube-apiserver-kubernetes-upgrade-969540,Uid:026e82fa09849f21a9dba3ccdf45c270,Namespace:kube-system,Attempt:0,} returns sandbox id \"ff1640c2e1f679d32d1e1516da0f12c95acc26d168485882ab4abd889eb233f1\""
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.682230292Z" level=info msg="CreateContainer within sandbox \"ff1640c2e1f679d32d1e1516da0f12c95acc26d168485882ab4abd889eb233f1\" for container &ContainerMetadata{Name:kube-apiserver,Attempt:5,}"
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.715362917Z" level=info msg="CreateContainer within sandbox \"ff1640c2e1f679d32d1e1516da0f12c95acc26d168485882ab4abd889eb233f1\" for &ContainerMetadata{Name:kube-apiserver,Attempt:5,} returns container id \"871a9eb6aa1fddce376e69beaad3a82cf323f4cae575247161d412bdc03c883f\""
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.716158014Z" level=info msg="StartContainer for \"871a9eb6aa1fddce376e69beaad3a82cf323f4cae575247161d412bdc03c883f\""
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.789116296Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:kube-scheduler-kubernetes-upgrade-969540,Uid:2f619d63662b81e56761dc3b5e875c00,Namespace:kube-system,Attempt:0,} returns sandbox id \"184790ba946c1b2585292f09e16f4344196aa101634be9a4b3a7c33dea5e3f37\""
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.798831325Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:kube-controller-manager-kubernetes-upgrade-969540,Uid:5b28a69fb4ba0eb370e15c9c59d21eb9,Namespace:kube-system,Attempt:0,} returns sandbox id \"de8678b3143290e151a3f78a2e2da4d9fac22f737c5531077055595c3d69a0c2\""
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.803901532Z" level=info msg="CreateContainer within sandbox \"de8678b3143290e151a3f78a2e2da4d9fac22f737c5531077055595c3d69a0c2\" for container &ContainerMetadata{Name:kube-controller-manager,Attempt:5,}"
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.804249288Z" level=info msg="CreateContainer within sandbox \"184790ba946c1b2585292f09e16f4344196aa101634be9a4b3a7c33dea5e3f37\" for container &ContainerMetadata{Name:kube-scheduler,Attempt:1,}"
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.816093289Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:etcd-kubernetes-upgrade-969540,Uid:7d35685c6481c345eb35559316bb4e0e,Namespace:kube-system,Attempt:0,} returns sandbox id \"ffd95ff35017a9a1d700c4cbb994fa61d1009f7610e980a7b504b3fa8f531ff9\""
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.837975247Z" level=info msg="CreateContainer within sandbox \"184790ba946c1b2585292f09e16f4344196aa101634be9a4b3a7c33dea5e3f37\" for &ContainerMetadata{Name:kube-scheduler,Attempt:1,} returns container id \"1354cc54cf3252c879eddb18671d3c4a45aa2bbf1931bee848eb9a36ed996f1e\""
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.841952766Z" level=info msg="StartContainer for \"1354cc54cf3252c879eddb18671d3c4a45aa2bbf1931bee848eb9a36ed996f1e\""
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.850823878Z" level=info msg="CreateContainer within sandbox \"ffd95ff35017a9a1d700c4cbb994fa61d1009f7610e980a7b504b3fa8f531ff9\" for container &ContainerMetadata{Name:etcd,Attempt:0,}"
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.861860869Z" level=info msg="CreateContainer within sandbox \"de8678b3143290e151a3f78a2e2da4d9fac22f737c5531077055595c3d69a0c2\" for &ContainerMetadata{Name:kube-controller-manager,Attempt:5,} returns container id \"0be2078071a35163f6d176f3f981f0b56254614d24322299345657740b61e141\""
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.868885870Z" level=info msg="StartContainer for \"0be2078071a35163f6d176f3f981f0b56254614d24322299345657740b61e141\""
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.904611366Z" level=info msg="StartContainer for \"871a9eb6aa1fddce376e69beaad3a82cf323f4cae575247161d412bdc03c883f\" returns successfully"
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.925658834Z" level=info msg="CreateContainer within sandbox \"ffd95ff35017a9a1d700c4cbb994fa61d1009f7610e980a7b504b3fa8f531ff9\" for &ContainerMetadata{Name:etcd,Attempt:0,} returns container id \"dbf1d1255f76e617fcdfbed339b44fb1c06c9f8d6526bd851155b1fcf2748ee7\""
	Sep 16 11:23:13 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:13.926617885Z" level=info msg="StartContainer for \"dbf1d1255f76e617fcdfbed339b44fb1c06c9f8d6526bd851155b1fcf2748ee7\""
	Sep 16 11:23:14 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:14.215166979Z" level=info msg="StartContainer for \"1354cc54cf3252c879eddb18671d3c4a45aa2bbf1931bee848eb9a36ed996f1e\" returns successfully"
	Sep 16 11:23:14 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:14.215277731Z" level=info msg="StartContainer for \"0be2078071a35163f6d176f3f981f0b56254614d24322299345657740b61e141\" returns successfully"
	Sep 16 11:23:14 kubernetes-upgrade-969540 containerd[600]: time="2024-09-16T11:23:14.465378644Z" level=info msg="StartContainer for \"dbf1d1255f76e617fcdfbed339b44fb1c06c9f8d6526bd851155b1fcf2748ee7\" returns successfully"
	
	
	==> describe nodes <==
	Name:               kubernetes-upgrade-969540
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=kubernetes-upgrade-969540
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=90d544f06ea0f69499271b003be64a9a224d57ed
	                    minikube.k8s.io/name=kubernetes-upgrade-969540
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2024_09_16T11_23_24_0700
	                    minikube.k8s.io/version=v1.34.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 16 Sep 2024 11:23:20 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  kubernetes-upgrade-969540
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 16 Sep 2024 11:23:23 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 16 Sep 2024 11:23:23 +0000   Mon, 16 Sep 2024 11:23:14 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 16 Sep 2024 11:23:23 +0000   Mon, 16 Sep 2024 11:23:14 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 16 Sep 2024 11:23:23 +0000   Mon, 16 Sep 2024 11:23:14 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 16 Sep 2024 11:23:23 +0000   Mon, 16 Sep 2024 11:23:20 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.76.2
	  Hostname:    kubernetes-upgrade-969540
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022300Ki
	  pods:               110
	System Info:
	  Machine ID:                 f6bf148f204543f4a39b344daa356870
	  System UUID:                475a14a9-e220-4d5a-b69c-5d3fae13e4a1
	  Boot ID:                    2dbe9824-0858-4a96-a54e-17aa28a3ccfb
	  Kernel Version:             5.15.0-1069-aws
	  OS Image:                   Ubuntu 22.04.4 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.22
	  Kubelet Version:            v1.31.1
	  Kube-Proxy Version:         v1.31.1
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                                 CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                                 ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-7c65d6cfc9-6dbfd                             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     0s
	  kube-system                 coredns-7c65d6cfc9-vpk2q                             100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     0s
	  kube-system                 etcd-kubernetes-upgrade-969540                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         4s
	  kube-system                 kindnet-8ndz8                                        100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      0s
	  kube-system                 kube-apiserver-kubernetes-upgrade-969540             250m (12%)    0 (0%)      0 (0%)           0 (0%)         7s
	  kube-system                 kube-controller-manager-kubernetes-upgrade-969540    200m (10%)    0 (0%)      0 (0%)           0 (0%)         4s
	  kube-system                 kube-proxy-jjmd7                                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         0s
	  kube-system                 kube-scheduler-kubernetes-upgrade-969540             100m (5%)     0 (0%)      0 (0%)           0 (0%)         7s
	  kube-system                 storage-provisioner                                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         3s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                950m (47%)  100m (5%)
	  memory             290Mi (3%)  390Mi (4%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age   From             Message
	  ----     ------                   ----  ----             -------
	  Normal   Starting                 5s    kubelet          Starting kubelet.
	  Warning  CgroupV1                 5s    kubelet          Cgroup v1 support is in maintenance mode, please migrate to Cgroup v2.
	  Normal   NodeAllocatableEnforced  5s    kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  5s    kubelet          Node kubernetes-upgrade-969540 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    5s    kubelet          Node kubernetes-upgrade-969540 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     5s    kubelet          Node kubernetes-upgrade-969540 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           1s    node-controller  Node kubernetes-upgrade-969540 event: Registered Node kubernetes-upgrade-969540 in Controller
	
	
	==> dmesg <==
	[Sep16 11:22] overlayfs: failed to resolve '/var/lib/containerd/io.containerd.snapshotter.v1.overlayfs/snapshots/25/fs': -2
	
	
	==> etcd [dbf1d1255f76e617fcdfbed339b44fb1c06c9f8d6526bd851155b1fcf2748ee7] <==
	{"level":"info","ts":"2024-09-16T11:23:15.051753Z","caller":"embed/etcd.go:728","msg":"starting with client TLS","tls-info":"cert = /var/lib/minikube/certs/etcd/server.crt, key = /var/lib/minikube/certs/etcd/server.key, client-cert=, client-key=, trusted-ca = /var/lib/minikube/certs/etcd/ca.crt, client-cert-auth = true, crl-file = ","cipher-suites":[]}
	{"level":"info","ts":"2024-09-16T11:23:15.051861Z","caller":"embed/etcd.go:599","msg":"serving peer traffic","address":"192.168.76.2:2380"}
	{"level":"info","ts":"2024-09-16T11:23:15.054965Z","caller":"embed/etcd.go:571","msg":"cmux::serve","address":"192.168.76.2:2380"}
	{"level":"info","ts":"2024-09-16T11:23:15.055598Z","caller":"embed/etcd.go:279","msg":"now serving peer/client/metrics","local-member-id":"ea7e25599daad906","initial-advertise-peer-urls":["https://192.168.76.2:2380"],"listen-peer-urls":["https://192.168.76.2:2380"],"advertise-client-urls":["https://192.168.76.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.76.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2024-09-16T11:23:15.055943Z","caller":"embed/etcd.go:870","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2024-09-16T11:23:15.770727Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"ea7e25599daad906 is starting a new election at term 1"}
	{"level":"info","ts":"2024-09-16T11:23:15.771030Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"ea7e25599daad906 became pre-candidate at term 1"}
	{"level":"info","ts":"2024-09-16T11:23:15.771131Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"ea7e25599daad906 received MsgPreVoteResp from ea7e25599daad906 at term 1"}
	{"level":"info","ts":"2024-09-16T11:23:15.771229Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"ea7e25599daad906 became candidate at term 2"}
	{"level":"info","ts":"2024-09-16T11:23:15.771304Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"ea7e25599daad906 received MsgVoteResp from ea7e25599daad906 at term 2"}
	{"level":"info","ts":"2024-09-16T11:23:15.771391Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"ea7e25599daad906 became leader at term 2"}
	{"level":"info","ts":"2024-09-16T11:23:15.771476Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: ea7e25599daad906 elected leader ea7e25599daad906 at term 2"}
	{"level":"info","ts":"2024-09-16T11:23:15.774879Z","caller":"etcdserver/server.go:2118","msg":"published local member to cluster through raft","local-member-id":"ea7e25599daad906","local-member-attributes":"{Name:kubernetes-upgrade-969540 ClientURLs:[https://192.168.76.2:2379]}","request-path":"/0/members/ea7e25599daad906/attributes","cluster-id":"6f20f2c4b2fb5f8a","publish-timeout":"7s"}
	{"level":"info","ts":"2024-09-16T11:23:15.775234Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:23:15.775462Z","caller":"etcdserver/server.go:2629","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:23:15.782972Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2024-09-16T11:23:15.783895Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:23:15.787631Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.76.2:2379"}
	{"level":"info","ts":"2024-09-16T11:23:15.795414Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2024-09-16T11:23:15.796472Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2024-09-16T11:23:15.802951Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2024-09-16T11:23:15.803174Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2024-09-16T11:23:15.803402Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"6f20f2c4b2fb5f8a","local-member-id":"ea7e25599daad906","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:23:15.807124Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2024-09-16T11:23:15.807294Z","caller":"etcdserver/server.go:2653","msg":"cluster version is updated","cluster-version":"3.5"}
	
	
	==> kernel <==
	 11:23:29 up 1 day, 15:05,  0 users,  load average: 4.58, 3.72, 2.99
	Linux kubernetes-upgrade-969540 5.15.0-1069-aws #75~20.04.1-Ubuntu SMP Mon Aug 19 16:22:47 UTC 2024 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.4 LTS"
	
	
	==> kube-apiserver [871a9eb6aa1fddce376e69beaad3a82cf323f4cae575247161d412bdc03c883f] <==
	I0916 11:23:20.611183       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0916 11:23:20.611409       1 policy_source.go:224] refreshing policies
	I0916 11:23:20.611700       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0916 11:23:20.611803       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0916 11:23:20.616380       1 controller.go:615] quota admission added evaluator for: namespaces
	I0916 11:23:20.618748       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0916 11:23:20.619454       1 cache.go:39] Caches are synced for LocalAvailability controller
	E0916 11:23:20.636648       1 controller.go:145] "Failed to ensure lease exists, will retry" err="namespaces \"kube-system\" not found" interval="200ms"
	E0916 11:23:20.638596       1 controller.go:148] "Unhandled Error" err="while syncing ConfigMap \"kube-system/kube-apiserver-legacy-service-account-token-tracking\", err: namespaces \"kube-system\" not found" logger="UnhandledError"
	I0916 11:23:20.807329       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0916 11:23:20.830854       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0916 11:23:20.830882       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0916 11:23:20.861888       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0916 11:23:22.128708       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0916 11:23:22.204336       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0916 11:23:22.327523       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0916 11:23:22.394398       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.76.2]
	I0916 11:23:22.395658       1 controller.go:615] quota admission added evaluator for: endpoints
	I0916 11:23:22.403948       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0916 11:23:22.549936       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0916 11:23:23.583874       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0916 11:23:23.601110       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0916 11:23:23.620512       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0916 11:23:28.136629       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	I0916 11:23:28.414079       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	
	
	==> kube-controller-manager [0be2078071a35163f6d176f3f981f0b56254614d24322299345657740b61e141] <==
	I0916 11:23:27.366401       1 shared_informer.go:320] Caches are synced for deployment
	I0916 11:23:27.370139       1 shared_informer.go:320] Caches are synced for endpoint_slice_mirroring
	I0916 11:23:27.386070       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0916 11:23:27.386151       1 shared_informer.go:320] Caches are synced for ephemeral
	I0916 11:23:27.386462       1 shared_informer.go:320] Caches are synced for validatingadmissionpolicy-status
	I0916 11:23:27.459292       1 shared_informer.go:313] Waiting for caches to sync for garbage collector
	I0916 11:23:27.485786       1 range_allocator.go:422] "Set node PodCIDR" logger="node-ipam-controller" node="kubernetes-upgrade-969540" podCIDRs=["10.244.0.0/24"]
	I0916 11:23:27.485828       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="kubernetes-upgrade-969540"
	I0916 11:23:27.509965       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="kubernetes-upgrade-969540"
	I0916 11:23:27.521278       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 11:23:27.531757       1 shared_informer.go:320] Caches are synced for resource quota
	I0916 11:23:27.538240       1 shared_informer.go:320] Caches are synced for disruption
	I0916 11:23:27.553457       1 shared_informer.go:320] Caches are synced for stateful set
	I0916 11:23:27.566092       1 shared_informer.go:320] Caches are synced for attach detach
	I0916 11:23:27.566512       1 shared_informer.go:320] Caches are synced for persistent volume
	I0916 11:23:27.719206       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="kubernetes-upgrade-969540"
	I0916 11:23:27.871522       1 range_allocator.go:241] "Successfully synced" logger="node-ipam-controller" key="kubernetes-upgrade-969540"
	I0916 11:23:27.963353       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 11:23:27.985713       1 shared_informer.go:320] Caches are synced for garbage collector
	I0916 11:23:27.987250       1 garbagecollector.go:157] "All resource monitors have synced. Proceeding to collect garbage" logger="garbage-collector-controller"
	I0916 11:23:28.563045       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="117.378283ms"
	I0916 11:23:28.586192       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="23.096105ms"
	I0916 11:23:28.681131       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="94.884341ms"
	I0916 11:23:28.681268       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="84.365µs"
	I0916 11:23:28.681703       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-7c65d6cfc9" duration="35.857µs"
	
	
	==> kube-scheduler [1354cc54cf3252c879eddb18671d3c4a45aa2bbf1931bee848eb9a36ed996f1e] <==
	I0916 11:23:20.533332       1 serving.go:386] Generated self-signed cert in-memory
	I0916 11:23:22.321664       1 server.go:167] "Starting Kubernetes Scheduler" version="v1.31.1"
	I0916 11:23:22.323054       1 server.go:169] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0916 11:23:22.332144       1 secure_serving.go:213] Serving securely on 127.0.0.1:10259
	I0916 11:23:22.335429       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController
	I0916 11:23:22.363503       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController
	I0916 11:23:22.335950       1 tlsconfig.go:243] "Starting DynamicServingCertificateController"
	I0916 11:23:22.362232       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file"
	I0916 11:23:22.370045       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 11:23:22.362208       1 configmap_cafile_content.go:205] "Starting controller" name="client-ca::kube-system::extension-apiserver-authentication::client-ca-file"
	I0916 11:23:22.370245       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	I0916 11:23:22.464707       1 shared_informer.go:320] Caches are synced for RequestHeaderAuthRequestController
	I0916 11:23:22.471144       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file
	I0916 11:23:22.473106       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Sep 16 11:23:27 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:27.872264    6178 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/0b7222d5-253e-4822-9f41-46cc30be6266-tmp\") pod \"storage-provisioner\" (UID: \"0b7222d5-253e-4822-9f41-46cc30be6266\") " pod="kube-system/storage-provisioner"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: E0916 11:23:28.234207    6178 projected.go:288] Couldn't get configMap kube-system/kube-root-ca.crt: configmap "kube-root-ca.crt" not found
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: E0916 11:23:28.234253    6178 projected.go:194] Error preparing data for projected volume kube-api-access-8cwks for pod kube-system/storage-provisioner: configmap "kube-root-ca.crt" not found
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: E0916 11:23:28.235897    6178 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0b7222d5-253e-4822-9f41-46cc30be6266-kube-api-access-8cwks podName:0b7222d5-253e-4822-9f41-46cc30be6266 nodeName:}" failed. No retries permitted until 2024-09-16 11:23:28.735208975 +0000 UTC m=+5.335307658 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-8cwks" (UniqueName: "kubernetes.io/projected/0b7222d5-253e-4822-9f41-46cc30be6266-kube-api-access-8cwks") pod "storage-provisioner" (UID: "0b7222d5-253e-4822-9f41-46cc30be6266") : configmap "kube-root-ca.crt" not found
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:28.505308    6178 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-cfg\" (UniqueName: \"kubernetes.io/host-path/87e34b80-7305-4e2d-836a-880924405da3-cni-cfg\") pod \"kindnet-8ndz8\" (UID: \"87e34b80-7305-4e2d-836a-880924405da3\") " pod="kube-system/kindnet-8ndz8"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:28.511573    6178 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4f5k\" (UniqueName: \"kubernetes.io/projected/87e34b80-7305-4e2d-836a-880924405da3-kube-api-access-d4f5k\") pod \"kindnet-8ndz8\" (UID: \"87e34b80-7305-4e2d-836a-880924405da3\") " pod="kube-system/kindnet-8ndz8"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:28.512500    6178 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rzlt\" (UniqueName: \"kubernetes.io/projected/f5170af5-640d-4bb1-b724-d2ee962c7471-kube-api-access-7rzlt\") pod \"kube-proxy-jjmd7\" (UID: \"f5170af5-640d-4bb1-b724-d2ee962c7471\") " pod="kube-system/kube-proxy-jjmd7"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:28.512645    6178 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/f5170af5-640d-4bb1-b724-d2ee962c7471-lib-modules\") pod \"kube-proxy-jjmd7\" (UID: \"f5170af5-640d-4bb1-b724-d2ee962c7471\") " pod="kube-system/kube-proxy-jjmd7"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:28.513202    6178 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/87e34b80-7305-4e2d-836a-880924405da3-lib-modules\") pod \"kindnet-8ndz8\" (UID: \"87e34b80-7305-4e2d-836a-880924405da3\") " pod="kube-system/kindnet-8ndz8"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:28.513856    6178 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/87e34b80-7305-4e2d-836a-880924405da3-xtables-lock\") pod \"kindnet-8ndz8\" (UID: \"87e34b80-7305-4e2d-836a-880924405da3\") " pod="kube-system/kindnet-8ndz8"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:28.514034    6178 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-proxy\" (UniqueName: \"kubernetes.io/configmap/f5170af5-640d-4bb1-b724-d2ee962c7471-kube-proxy\") pod \"kube-proxy-jjmd7\" (UID: \"f5170af5-640d-4bb1-b724-d2ee962c7471\") " pod="kube-system/kube-proxy-jjmd7"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:28.514177    6178 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"xtables-lock\" (UniqueName: \"kubernetes.io/host-path/f5170af5-640d-4bb1-b724-d2ee962c7471-xtables-lock\") pod \"kube-proxy-jjmd7\" (UID: \"f5170af5-640d-4bb1-b724-d2ee962c7471\") " pod="kube-system/kube-proxy-jjmd7"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:28.615886    6178 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c27c2949-2794-42ef-8e65-6a43760e6477-config-volume\") pod \"coredns-7c65d6cfc9-6dbfd\" (UID: \"c27c2949-2794-42ef-8e65-6a43760e6477\") " pod="kube-system/coredns-7c65d6cfc9-6dbfd"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:28.615982    6178 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f5682cbf-b6b8-4fd1-9d90-c872aa69d306-config-volume\") pod \"coredns-7c65d6cfc9-vpk2q\" (UID: \"f5682cbf-b6b8-4fd1-9d90-c872aa69d306\") " pod="kube-system/coredns-7c65d6cfc9-vpk2q"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:28.616020    6178 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86kcx\" (UniqueName: \"kubernetes.io/projected/f5682cbf-b6b8-4fd1-9d90-c872aa69d306-kube-api-access-86kcx\") pod \"coredns-7c65d6cfc9-vpk2q\" (UID: \"f5682cbf-b6b8-4fd1-9d90-c872aa69d306\") " pod="kube-system/coredns-7c65d6cfc9-vpk2q"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:28.616051    6178 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9qvw\" (UniqueName: \"kubernetes.io/projected/c27c2949-2794-42ef-8e65-6a43760e6477-kube-api-access-q9qvw\") pod \"coredns-7c65d6cfc9-6dbfd\" (UID: \"c27c2949-2794-42ef-8e65-6a43760e6477\") " pod="kube-system/coredns-7c65d6cfc9-6dbfd"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: I0916 11:23:28.666525    6178 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: E0916 11:23:28.899413    6178 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"4ca26d8d48f079f07204579fcb76477a19d278d35505182e26cd0bfadb39f5f4\": failed to find network info for sandbox \"4ca26d8d48f079f07204579fcb76477a19d278d35505182e26cd0bfadb39f5f4\""
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: E0916 11:23:28.899514    6178 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"4ca26d8d48f079f07204579fcb76477a19d278d35505182e26cd0bfadb39f5f4\": failed to find network info for sandbox \"4ca26d8d48f079f07204579fcb76477a19d278d35505182e26cd0bfadb39f5f4\"" pod="kube-system/coredns-7c65d6cfc9-vpk2q"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: E0916 11:23:28.899553    6178 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"4ca26d8d48f079f07204579fcb76477a19d278d35505182e26cd0bfadb39f5f4\": failed to find network info for sandbox \"4ca26d8d48f079f07204579fcb76477a19d278d35505182e26cd0bfadb39f5f4\"" pod="kube-system/coredns-7c65d6cfc9-vpk2q"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: E0916 11:23:28.899643    6178 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-7c65d6cfc9-vpk2q_kube-system(f5682cbf-b6b8-4fd1-9d90-c872aa69d306)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-7c65d6cfc9-vpk2q_kube-system(f5682cbf-b6b8-4fd1-9d90-c872aa69d306)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"4ca26d8d48f079f07204579fcb76477a19d278d35505182e26cd0bfadb39f5f4\\\": failed to find network info for sandbox \\\"4ca26d8d48f079f07204579fcb76477a19d278d35505182e26cd0bfadb39f5f4\\\"\"" pod="kube-system/coredns-7c65d6cfc9-vpk2q" podUID="f5682cbf-b6b8-4fd1-9d90-c872aa69d306"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: E0916 11:23:28.953995    6178 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"0240dbdd785c71d101d40e604c0c777e9cb51b9208df952565313737d37cae5e\": failed to find network info for sandbox \"0240dbdd785c71d101d40e604c0c777e9cb51b9208df952565313737d37cae5e\""
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: E0916 11:23:28.954121    6178 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"0240dbdd785c71d101d40e604c0c777e9cb51b9208df952565313737d37cae5e\": failed to find network info for sandbox \"0240dbdd785c71d101d40e604c0c777e9cb51b9208df952565313737d37cae5e\"" pod="kube-system/coredns-7c65d6cfc9-6dbfd"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: E0916 11:23:28.954189    6178 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"0240dbdd785c71d101d40e604c0c777e9cb51b9208df952565313737d37cae5e\": failed to find network info for sandbox \"0240dbdd785c71d101d40e604c0c777e9cb51b9208df952565313737d37cae5e\"" pod="kube-system/coredns-7c65d6cfc9-6dbfd"
	Sep 16 11:23:28 kubernetes-upgrade-969540 kubelet[6178]: E0916 11:23:28.954288    6178 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-7c65d6cfc9-6dbfd_kube-system(c27c2949-2794-42ef-8e65-6a43760e6477)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-7c65d6cfc9-6dbfd_kube-system(c27c2949-2794-42ef-8e65-6a43760e6477)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"0240dbdd785c71d101d40e604c0c777e9cb51b9208df952565313737d37cae5e\\\": failed to find network info for sandbox \\\"0240dbdd785c71d101d40e604c0c777e9cb51b9208df952565313737d37cae5e\\\"\"" pod="kube-system/coredns-7c65d6cfc9-6dbfd" podUID="c27c2949-2794-42ef-8e65-6a43760e6477"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p kubernetes-upgrade-969540 -n kubernetes-upgrade-969540
helpers_test.go:261: (dbg) Run:  kubectl --context kubernetes-upgrade-969540 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:261: (dbg) Non-zero exit: kubectl --context kubernetes-upgrade-969540 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error (692.32µs)
helpers_test.go:263: kubectl --context kubernetes-upgrade-969540 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running: fork/exec /usr/local/bin/kubectl: exec format error
helpers_test.go:175: Cleaning up "kubernetes-upgrade-969540" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p kubernetes-upgrade-969540
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p kubernetes-upgrade-969540: (2.354750599s)
--- FAIL: TestKubernetesUpgrade (346.35s)

                                                
                                    
x
+
TestNetworkPlugins/group/custom-flannel/NetCatPod (7200.07s)

                                                
                                                
=== RUN   TestNetworkPlugins/group/custom-flannel/NetCatPod
net_test.go:149: (dbg) Run:  kubectl --context custom-flannel-430967 replace --force -f testdata/netcat-deployment.yaml
net_test.go:149: (dbg) Non-zero exit: kubectl --context custom-flannel-430967 replace --force -f testdata/netcat-deployment.yaml: fork/exec /usr/local/bin/kubectl: exec format error (491.721µs)
net_test.go:151: failed to apply netcat manifest: fork/exec /usr/local/bin/kubectl: exec format error
E0916 12:14:07.672511 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:15:09.534989 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/default-k8s-diff-port-839735/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:10.744074 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:13.777521 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:13.783943 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:13.795279 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:13.816722 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:13.858174 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:13.939600 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:14.101449 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:14.423219 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:15.064508 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:16.346084 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:18.907747 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:24.029777 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:34.271899 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:35.213075 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:35.219423 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:35.230764 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:35.252061 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:35.293508 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:35.374868 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:35.536579 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:35.858488 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:36.500502 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:37.782303 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:40.343652 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:45.465352 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:54.754176 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:17:55.707488 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:18:12.605281 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/default-k8s-diff-port-839735/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:18:14.804275 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/no-preload-472762/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:18:16.188810 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:18:33.956776 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:18:35.715538 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:18:48.789738 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/old-k8s-version-697499/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:18:57.150577 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:19:07.672087 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:19:57.636910 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:20:09.535071 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/default-k8s-diff-port-839735/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:20:19.071955 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:13.778568 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:35.213580 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:22:41.479165 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:23:02.913590 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:23:14.804516 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/no-preload-472762/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:23:33.956946 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:23:48.789217 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/old-k8s-version-697499/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:24:07.672574 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:25:09.535068 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/default-k8s-diff-port-839735/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:27:13.778325 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/auto-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:27:35.213220 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/kindnet-430967/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:27:57.876314 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/no-preload-472762/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:28:14.804778 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/no-preload-472762/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:28:17.036060 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:28:31.857231 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/old-k8s-version-697499/client.crt: no such file or directory" logger="UnhandledError"
E0916 12:28:33.956808 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
panic: test timed out after 2h0m0s
	running tests:
		TestNetworkPlugins (1h5m38s)
		TestNetworkPlugins/group/calico (16m19s)
		TestNetworkPlugins/group/calico/NetCatPod (14m57s)
		TestNetworkPlugins/group/custom-flannel (15m47s)
		TestNetworkPlugins/group/custom-flannel/NetCatPod (14m53s)
		TestStartStop (1h6m15s)
		TestStartStop/group (47m7s)

                                                
                                                
goroutine 5256 [running]:
testing.(*M).startAlarm.func1()
	/usr/local/go/src/testing/testing.go:2373 +0x30c
created by time.goFunc
	/usr/local/go/src/time/sleep.go:215 +0x38

                                                
                                                
goroutine 1 [chan receive, 61 minutes]:
testing.tRunner.func1()
	/usr/local/go/src/testing/testing.go:1651 +0x434
testing.tRunner(0x40000ed860, 0x40006e1bb8)
	/usr/local/go/src/testing/testing.go:1696 +0x120
testing.runTests(0x40004fa4e0, {0x47d1fe0, 0x2b, 0x2b}, {0x40006e1d08?, 0x11fc54?, 0x47f5c60?})
	/usr/local/go/src/testing/testing.go:2166 +0x3ac
testing.(*M).Run(0x4000af9180)
	/usr/local/go/src/testing/testing.go:2034 +0x588
k8s.io/minikube/test/integration.TestMain(0x4000af9180)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/main_test.go:62 +0x84
main.main()
	_testmain.go:131 +0x98

                                                
                                                
goroutine 5 [select]:
go.opencensus.io/stats/view.(*worker).start(0x40000d6280)
	/var/lib/jenkins/go/pkg/mod/go.opencensus.io@v0.24.0/stats/view/worker.go:292 +0x88
created by go.opencensus.io/stats/view.init.0 in goroutine 1
	/var/lib/jenkins/go/pkg/mod/go.opencensus.io@v0.24.0/stats/view/worker.go:34 +0x98

                                                
                                                
goroutine 2680 [select, 5 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 2679
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

                                                
                                                
goroutine 118 [chan receive, 116 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x4000496ec0, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 106
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

                                                
                                                
goroutine 117 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 106
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

                                                
                                                
goroutine 9 [select]:
k8s.io/klog/v2.(*flushDaemon).run.func1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/klog/v2@v2.130.1/klog.go:1141 +0xe0
created by k8s.io/klog/v2.(*flushDaemon).run in goroutine 8
	/var/lib/jenkins/go/pkg/mod/k8s.io/klog/v2@v2.130.1/klog.go:1137 +0x198

                                                
                                                
goroutine 2081 [chan receive, 67 minutes]:
testing.(*T).Run(0x400142f040, {0x2395ecb?, 0x40012d0f58?}, 0x2e80e50)
	/usr/local/go/src/testing/testing.go:1751 +0x328
k8s.io/minikube/test/integration.TestStartStop(0x400142f040)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/start_stop_delete_test.go:46 +0x3c
testing.tRunner(0x400142f040, 0x2e80c58)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 107 [sync.Cond.Wait]:
sync.runtime_notifyListWait(0x4000496e90, 0x2d)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x4000496e80)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x4000496ec0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x40011be560, {0x31d56c0, 0x4000ad84e0}, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x40011be560, 0x3b9aca00, 0x0, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 118
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

                                                
                                                
goroutine 853 [select, 5 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 852
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

                                                
                                                
goroutine 2322 [chan receive, 16 minutes]:
testing.tRunner.func1()
	/usr/local/go/src/testing/testing.go:1651 +0x434
testing.tRunner(0x400141d1e0, 0x4001478240)
	/usr/local/go/src/testing/testing.go:1696 +0x120
created by testing.(*T).Run in goroutine 2024
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 108 [select]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x400005e380}, 0x400117cf40, 0x4001437f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x400005e380}, 0x0?, 0x400117cf40, 0x400117cf88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x400005e380?}, 0x0?, 0x0?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x40007c0600?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 118
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

                                                
                                                
goroutine 109 [select]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 108
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

                                                
                                                
goroutine 660 [IO wait, 103 minutes]:
internal/poll.runtime_pollWait(0xffff54ab1d80, 0x72)
	/usr/local/go/src/runtime/netpoll.go:351 +0xa0
internal/poll.(*pollDesc).wait(0x40005e6b00?, 0x10?, 0x0)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:84 +0x28
internal/poll.(*pollDesc).waitRead(...)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:89
internal/poll.(*FD).Accept(0x40005e6b00)
	/usr/local/go/src/internal/poll/fd_unix.go:620 +0x24c
net.(*netFD).accept(0x40005e6b00)
	/usr/local/go/src/net/fd_unix.go:172 +0x28
net.(*TCPListener).accept(0x40000e3000)
	/usr/local/go/src/net/tcpsock_posix.go:159 +0x28
net.(*TCPListener).Accept(0x40000e3000)
	/usr/local/go/src/net/tcpsock.go:372 +0x2c
net/http.(*Server).Serve(0x40014764b0, {0x31ef6e0, 0x40000e3000})
	/usr/local/go/src/net/http/server.go:3330 +0x294
net/http.(*Server).ListenAndServe(0x40014764b0)
	/usr/local/go/src/net/http/server.go:3259 +0x84
k8s.io/minikube/test/integration.startHTTPProxy.func1(0x40007c1b00?, 0x400141c9c0)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/functional_test.go:2213 +0x20
created by k8s.io/minikube/test/integration.startHTTPProxy in goroutine 658
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/functional_test.go:2212 +0x11c

                                                
                                                
goroutine 826 [chan send, 97 minutes]:
os/exec.(*Cmd).watchCtx(0x40007c0f00, 0x40001033b0)
	/usr/local/go/src/os/exec/exec.go:798 +0x2c8
created by os/exec.(*Cmd).Start in goroutine 825
	/usr/local/go/src/os/exec/exec.go:759 +0x78c

                                                
                                                
goroutine 829 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 828
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

                                                
                                                
goroutine 2310 [chan receive, 48 minutes]:
testing.(*testContext).waitParallel(0x40007f0910)
	/usr/local/go/src/testing/testing.go:1818 +0x158
testing.tRunner.func1()
	/usr/local/go/src/testing/testing.go:1666 +0x530
testing.tRunner(0x400142e680, 0x2e80e50)
	/usr/local/go/src/testing/testing.go:1696 +0x120
created by testing.(*T).Run in goroutine 2081
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 2858 [sync.Cond.Wait, 3 minutes]:
sync.runtime_notifyListWait(0x4001675b90, 0x1b)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x4001675b80)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x4001675bc0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x40016a1d40, {0x31d56c0, 0x40013395f0}, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x40016a1d40, 0x3b9aca00, 0x0, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2855
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

                                                
                                                
goroutine 2024 [chan receive, 67 minutes]:
testing.(*T).Run(0x400142e1a0, {0x2395ecb?, 0x7f77c78d370f?}, 0x4001478240)
	/usr/local/go/src/testing/testing.go:1751 +0x328
k8s.io/minikube/test/integration.TestNetworkPlugins(0x400142e1a0)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:52 +0xcc
testing.tRunner(0x400142e1a0, 0x2e80c10)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 3034 [chan receive, 46 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x4001674480, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 3032
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

                                                
                                                
goroutine 2586 [select]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x400005e380}, 0x400128cf40, 0x400128cf88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x400005e380}, 0xf0?, 0x400128cf40, 0x400128cf88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x400005e380?}, 0x40004fcb60?, 0x4000101e50?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x40007c0d80?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2609
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

                                                
                                                
goroutine 2854 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 2853
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

                                                
                                                
goroutine 2860 [select, 3 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 2859
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

                                                
                                                
goroutine 851 [sync.Cond.Wait, 5 minutes]:
sync.runtime_notifyListWait(0x40000e37d0, 0x28)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x40000e37c0)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x40000e3800)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x40016a1500, {0x31d56c0, 0x4001f75440}, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x40016a1500, 0x3b9aca00, 0x0, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 830
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

                                                
                                                
goroutine 986 [chan send, 97 minutes]:
os/exec.(*Cmd).watchCtx(0x4000213b00, 0x4001544a80)
	/usr/local/go/src/os/exec/exec.go:798 +0x2c8
created by os/exec.(*Cmd).Start in goroutine 788
	/usr/local/go/src/os/exec/exec.go:759 +0x78c

                                                
                                                
goroutine 830 [chan receive, 99 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x40000e3800, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 828
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

                                                
                                                
goroutine 2678 [sync.Cond.Wait]:
sync.runtime_notifyListWait(0x4000497710, 0x1d)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x4000497700)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x4000497740)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x400178c0a0, {0x31d56c0, 0x4001f752c0}, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x400178c0a0, 0x3b9aca00, 0x0, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2652
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

                                                
                                                
goroutine 3036 [sync.Cond.Wait]:
sync.runtime_notifyListWait(0x4001674410, 0x19)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x4001674400)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x4001674480)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x400178c0c0, {0x31d56c0, 0x40014a6000}, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x400178c0c0, 0x3b9aca00, 0x0, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 3034
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

                                                
                                                
goroutine 907 [chan send, 97 minutes]:
os/exec.(*Cmd).watchCtx(0x40001a7200, 0x4001332460)
	/usr/local/go/src/os/exec/exec.go:798 +0x2c8
created by os/exec.(*Cmd).Start in goroutine 906
	/usr/local/go/src/os/exec/exec.go:759 +0x78c

                                                
                                                
goroutine 3037 [select]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x400005e380}, 0x40016bef40, 0x40012c0f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x400005e380}, 0xc0?, 0x40016bef40, 0x40016bef88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x400005e380?}, 0x0?, 0x0?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x400140c600?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 3034
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

                                                
                                                
goroutine 3038 [select]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 3037
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

                                                
                                                
goroutine 5154 [select, 5 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 5153
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

                                                
                                                
goroutine 2381 [chan receive, 67 minutes]:
testing.(*testContext).waitParallel(0x40007f0910)
	/usr/local/go/src/testing/testing.go:1818 +0x158
testing.(*T).Parallel(0x400142f860)
	/usr/local/go/src/testing/testing.go:1485 +0x1b8
k8s.io/minikube/test/integration.MaybeParallel(0x400142f860)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/helpers_test.go:483 +0x40
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x400142f860)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:106 +0x2ec
testing.tRunner(0x400142f860, 0x40000d6180)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2322
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 3051 [chan receive, 48 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x40000e2d40, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 3049
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

                                                
                                                
goroutine 3056 [select]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 3055
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

                                                
                                                
goroutine 852 [select, 5 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x400005e380}, 0x4000082f40, 0x40012d5f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x400005e380}, 0x38?, 0x4000082f40, 0x4000082f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x400005e380?}, 0x400163e180?, 0x4001559040?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x40007c0c00?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 830
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

                                                
                                                
goroutine 3033 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 3032
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

                                                
                                                
goroutine 3055 [select]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x400005e380}, 0x40013ccf40, 0x40012c5f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x400005e380}, 0xa8?, 0x40013ccf40, 0x40013ccf88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x400005e380?}, 0x0?, 0x0?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x4001300000?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 3051
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

                                                
                                                
goroutine 997 [chan send, 97 minutes]:
os/exec.(*Cmd).watchCtx(0x40007c0d80, 0x4001332b60)
	/usr/local/go/src/os/exec/exec.go:798 +0x2c8
created by os/exec.(*Cmd).Start in goroutine 996
	/usr/local/go/src/os/exec/exec.go:759 +0x78c

                                                
                                                
goroutine 5136 [sync.Cond.Wait, 5 minutes]:
sync.runtime_notifyListWait(0x4001675650, 0x2)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x4001675640)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x4001675680)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x40007f44f0, {0x31d56c0, 0x4000ad99b0}, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x40007f44f0, 0x3b9aca00, 0x0, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 5137
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

                                                
                                                
goroutine 5123 [select]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 5122
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

                                                
                                                
goroutine 3054 [sync.Cond.Wait]:
sync.runtime_notifyListWait(0x40000e2bd0, 0x19)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x40000e2bc0)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x40000e2d40)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x40016a0e60, {0x31d56c0, 0x40012f8e10}, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x40016a0e60, 0x3b9aca00, 0x0, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 3051
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

                                                
                                                
goroutine 2382 [chan receive, 67 minutes]:
testing.(*testContext).waitParallel(0x40007f0910)
	/usr/local/go/src/testing/testing.go:1818 +0x158
testing.(*T).Parallel(0x400142fa00)
	/usr/local/go/src/testing/testing.go:1485 +0x1b8
k8s.io/minikube/test/integration.MaybeParallel(0x400142fa00)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/helpers_test.go:483 +0x40
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x400142fa00)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:106 +0x2ec
testing.tRunner(0x400142fa00, 0x40000d6300)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2322
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 2859 [select, 3 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x400005e380}, 0x400157af40, 0x40012c2f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x400005e380}, 0x14?, 0x400157af40, 0x400157af88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x400005e380?}, 0x3966636665323165?, 0x6331313365303664?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x4001a8de60?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2855
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

                                                
                                                
goroutine 2609 [chan receive, 56 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x4001674640, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 2607
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

                                                
                                                
goroutine 2380 [chan receive, 67 minutes]:
testing.(*testContext).waitParallel(0x40007f0910)
	/usr/local/go/src/testing/testing.go:1818 +0x158
testing.(*T).Parallel(0x400142f6c0)
	/usr/local/go/src/testing/testing.go:1485 +0x1b8
k8s.io/minikube/test/integration.MaybeParallel(0x400142f6c0)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/helpers_test.go:483 +0x40
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x400142f6c0)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:106 +0x2ec
testing.tRunner(0x400142f6c0, 0x40000d6100)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2322
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 5101 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 5100
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

                                                
                                                
goroutine 5119 [select, 15 minutes]:
k8s.io/client-go/tools/watch.UntilWithoutRetry({0x31fc998, 0x4000463180}, {0x31e46a0, 0x400164c640}, {0x4001599d98, 0x1, 0x400170af00?})
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/tools/watch/until.go:73 +0x1fc
k8s.io/minikube/pkg/kapi.WaitForDeploymentToStabilize({0x32312a8, 0x400146e380}, {0x2399e9a, 0x7}, {0x2397572, 0x6}, 0xd18c2e2800)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/pkg/kapi/kapi.go:125 +0x418
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1.4(0x4001300680)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:159 +0x248
testing.tRunner(0x4001300680, 0x4001548db0)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2420
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 2420 [chan receive, 15 minutes]:
testing.(*T).Run(0x400141d6c0, {0x239ee85?, 0x31cbd58?}, 0x4001548db0)
	/usr/local/go/src/testing/testing.go:1751 +0x328
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x400141d6c0)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:148 +0x778
testing.tRunner(0x400141d6c0, 0x40015f9100)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2322
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 2651 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 2650
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

                                                
                                                
goroutine 2421 [chan receive, 15 minutes]:
testing.(*T).Run(0x400141d860, {0x239ee85?, 0x31cbd58?}, 0x40004a6210)
	/usr/local/go/src/testing/testing.go:1751 +0x328
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x400141d860)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:148 +0x778
testing.tRunner(0x400141d860, 0x40015f9180)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2322
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 5153 [select, 5 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x400005e380}, 0x4001579f40, 0x40012cef88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x400005e380}, 0xd8?, 0x4001579f40, 0x4001579f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x400005e380?}, 0x400179c780?, 0x40015ab180?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x4000213800?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 5137
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

                                                
                                                
goroutine 3050 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 3049
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

                                                
                                                
goroutine 2585 [sync.Cond.Wait]:
sync.runtime_notifyListWait(0x4001674610, 0x1e)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x4001674600)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x4001674640)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x400178c020, {0x31d56c0, 0x40014a6030}, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x400178c020, 0x3b9aca00, 0x0, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2609
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

                                                
                                                
goroutine 2652 [chan receive, 55 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x4000497740, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 2650
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

                                                
                                                
goroutine 2855 [chan receive, 49 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x4001675bc0, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 2853
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

                                                
                                                
goroutine 2587 [select]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 2586
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:280 +0xc0

                                                
                                                
goroutine 2679 [select, 5 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x400005e380}, 0x40013cf740, 0x400066ff88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x400005e380}, 0x98?, 0x40013cf740, 0x40013cf788)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x400005e380?}, 0x400005e380?, 0x31392d6c616e6f69?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x8d7b4?, 0x4000213800?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2652
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

                                                
                                                
goroutine 2608 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 2607
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

                                                
                                                
goroutine 5142 [sync.Cond.Wait, 15 minutes]:
sync.runtime_notifyListWait(0x40013f4648, 0x0)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x40013f4638)
	/usr/local/go/src/sync/cond.go:71 +0xcc
golang.org/x/net/http2.(*pipe).Read(0x40013f4630, {0x40015ab400, 0x200, 0x200})
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/pipe.go:76 +0x104
golang.org/x/net/http2.transportResponseBody.Read({0x0?}, {0x40015ab400?, 0x400157a458?, 0x1db2c?})
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:2637 +0x50
encoding/json.(*Decoder).refill(0x4000453040)
	/usr/local/go/src/encoding/json/stream.go:165 +0x164
encoding/json.(*Decoder).readValue(0x4000453040)
	/usr/local/go/src/encoding/json/stream.go:140 +0x74
encoding/json.(*Decoder).Decode(0x4000453040, {0x1f8bea0, 0x400132ce10})
	/usr/local/go/src/encoding/json/stream.go:63 +0x5c
k8s.io/apimachinery/pkg/util/framer.(*jsonFrameReader).Read(0x40013dcd50, {0x4001278400, 0x400, 0x400})
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/framer/framer.go:151 +0x168
k8s.io/apimachinery/pkg/runtime/serializer/streaming.(*decoder).Decode(0x40007c4ff0, 0x0, {0x31e2f98, 0x400164c680})
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/runtime/serializer/streaming/streaming.go:77 +0x88
k8s.io/client-go/rest/watch.(*Decoder).Decode(0x4001650540)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/rest/watch/decoder.go:49 +0x5c
k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0x400164c640)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/watch/streamwatcher.go:105 +0xa8
created by k8s.io/apimachinery/pkg/watch.NewStreamWatcher in goroutine 5119
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/watch/streamwatcher.go:76 +0x104

                                                
                                                
goroutine 5156 [IO wait]:
internal/poll.runtime_pollWait(0xffff54ab1018, 0x72)
	/usr/local/go/src/runtime/netpoll.go:351 +0xa0
internal/poll.(*pollDesc).wait(0x4001649700?, 0x40013a3800?, 0x0)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:84 +0x28
internal/poll.(*pollDesc).waitRead(...)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:89
internal/poll.(*FD).Read(0x4001649700, {0x40013a3800, 0x800, 0x800})
	/usr/local/go/src/internal/poll/fd_unix.go:165 +0x1fc
net.(*netFD).Read(0x4001649700, {0x40013a3800?, 0x18?, 0x10?})
	/usr/local/go/src/net/fd_posix.go:55 +0x28
net.(*conn).Read(0x400138a7f0, {0x40013a3800?, 0x4001275968?, 0x30f588?})
	/usr/local/go/src/net/net.go:189 +0x34
crypto/tls.(*atLeastReader).Read(0x400132c438, {0x40013a3800?, 0x0?, 0x400132c438?})
	/usr/local/go/src/crypto/tls/conn.go:809 +0x40
bytes.(*Buffer).ReadFrom(0x400144d7b8, {0x31d5f00, 0x400132c438})
	/usr/local/go/src/bytes/buffer.go:211 +0x90
crypto/tls.(*Conn).readFromUntil(0x400144d508, {0xffff5475a960, 0x400132cd98}, 0x0?)
	/usr/local/go/src/crypto/tls/conn.go:831 +0xd0
crypto/tls.(*Conn).readRecordOrCCS(0x400144d508, 0x0)
	/usr/local/go/src/crypto/tls/conn.go:629 +0x35c
crypto/tls.(*Conn).readRecord(...)
	/usr/local/go/src/crypto/tls/conn.go:591
crypto/tls.(*Conn).Read(0x400144d508, {0x4001535000, 0x1000, 0x17d74?})
	/usr/local/go/src/crypto/tls/conn.go:1385 +0x164
bufio.(*Reader).Read(0x4001742ae0, {0x40016c7540, 0x9, 0x180b8?})
	/usr/local/go/src/bufio/bufio.go:241 +0x1b4
io.ReadAtLeast({0x31d4200, 0x4001742ae0}, {0x40016c7540, 0x9, 0x9}, 0x9)
	/usr/local/go/src/io/io.go:335 +0xa0
io.ReadFull(...)
	/usr/local/go/src/io/io.go:354
golang.org/x/net/http2.readFrameHeader({0x40016c7540, 0x9, 0x1f38a20?}, {0x31d4200?, 0x4001742ae0?})
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/frame.go:237 +0x58
golang.org/x/net/http2.(*Framer).ReadFrame(0x40016c7500)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/frame.go:501 +0x78
golang.org/x/net/http2.(*clientConnReadLoop).run(0x4001275f98)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:2354 +0xd0
golang.org/x/net/http2.(*ClientConn).readLoop(0x40013f4480)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:2250 +0x78
created by golang.org/x/net/http2.(*Transport).newClientConn in goroutine 5155
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:865 +0xad0

                                                
                                                
goroutine 5125 [IO wait]:
internal/poll.runtime_pollWait(0xffff54ab1228, 0x72)
	/usr/local/go/src/runtime/netpoll.go:351 +0xa0
internal/poll.(*pollDesc).wait(0x40000d6600?, 0x40017ec000?, 0x0)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:84 +0x28
internal/poll.(*pollDesc).waitRead(...)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:89
internal/poll.(*FD).Read(0x40000d6600, {0x40017ec000, 0x5500, 0x5500})
	/usr/local/go/src/internal/poll/fd_unix.go:165 +0x1fc
net.(*netFD).Read(0x40000d6600, {0x40017ec000?, 0x18?, 0x10?})
	/usr/local/go/src/net/fd_posix.go:55 +0x28
net.(*conn).Read(0x400138a2f8, {0x40017ec000?, 0x4001273968?, 0x30f588?})
	/usr/local/go/src/net/net.go:189 +0x34
crypto/tls.(*atLeastReader).Read(0x400132c420, {0x40017ec000?, 0x0?, 0x400132c420?})
	/usr/local/go/src/crypto/tls/conn.go:809 +0x40
bytes.(*Buffer).ReadFrom(0x40004be638, {0x31d5f00, 0x400132c420})
	/usr/local/go/src/bytes/buffer.go:211 +0x90
crypto/tls.(*Conn).readFromUntil(0x40004be388, {0xffff5475a960, 0x4001478468}, 0x0?)
	/usr/local/go/src/crypto/tls/conn.go:831 +0xd0
crypto/tls.(*Conn).readRecordOrCCS(0x40004be388, 0x0)
	/usr/local/go/src/crypto/tls/conn.go:629 +0x35c
crypto/tls.(*Conn).readRecord(...)
	/usr/local/go/src/crypto/tls/conn.go:591
crypto/tls.(*Conn).Read(0x40004be388, {0x4001320000, 0x1000, 0x17d74?})
	/usr/local/go/src/crypto/tls/conn.go:1385 +0x164
bufio.(*Reader).Read(0x400165de60, {0x40016c6820, 0x9, 0x180b8?})
	/usr/local/go/src/bufio/bufio.go:241 +0x1b4
io.ReadAtLeast({0x31d4200, 0x400165de60}, {0x40016c6820, 0x9, 0x9}, 0x9)
	/usr/local/go/src/io/io.go:335 +0xa0
io.ReadFull(...)
	/usr/local/go/src/io/io.go:354
golang.org/x/net/http2.readFrameHeader({0x40016c6820, 0x9, 0x1f38a20?}, {0x31d4200?, 0x400165de60?})
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/frame.go:237 +0x58
golang.org/x/net/http2.(*Framer).ReadFrame(0x40016c67e0)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/frame.go:501 +0x78
golang.org/x/net/http2.(*clientConnReadLoop).run(0x4001273f98)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:2354 +0xd0
golang.org/x/net/http2.(*ClientConn).readLoop(0x40013b8780)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:2250 +0x78
created by golang.org/x/net/http2.(*Transport).newClientConn in goroutine 5124
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:865 +0xad0

                                                
                                                
goroutine 5137 [chan receive, 15 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x4001675680, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 5119
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

                                                
                                                
goroutine 5157 [select, 15 minutes]:
golang.org/x/net/http2.(*clientStream).writeRequest(0x40013f4600, 0x4000452dc0, 0x0)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:1532 +0x870
golang.org/x/net/http2.(*clientStream).doRequest(0x40013f4600, 0x11?, 0x40013004e0?)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:1410 +0x58
created by golang.org/x/net/http2.(*ClientConn).roundTrip in goroutine 5119
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:1315 +0x374

                                                
                                                
goroutine 5121 [sync.Cond.Wait]:
sync.runtime_notifyListWait(0x4000496650, 0x3)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x4000496640)
	/usr/local/go/src/sync/cond.go:71 +0xcc
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3218260)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/queue.go:282 +0x8c
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x4000496dc0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:159 +0x40
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x30?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x4001684310, {0x31d56c0, 0x4001dd3da0}, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x4001684310, 0x3b9aca00, 0x0, 0x1, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 5102
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:143 +0x198

                                                
                                                
goroutine 5133 [select, 15 minutes]:
k8s.io/client-go/tools/watch.UntilWithoutRetry({0x31fc998, 0x40004fc540}, {0x31e46a0, 0x40014ecb00}, {0x400079dd98, 0x1, 0x400144a150?})
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/tools/watch/until.go:73 +0x1fc
k8s.io/minikube/pkg/kapi.WaitForDeploymentToStabilize({0x32312a8, 0x400130d6c0}, {0x2399e9a, 0x7}, {0x2397572, 0x6}, 0xd18c2e2800)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/pkg/kapi/kapi.go:125 +0x418
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1.4(0x400141da00)
	/mnt/disks/sdb/jenkins/go/src/k8s.io/minikube/test/integration/net_test.go:159 +0x248
testing.tRunner(0x400141da00, 0x40004a6210)
	/usr/local/go/src/testing/testing.go:1690 +0xe4
created by testing.(*T).Run in goroutine 2421
	/usr/local/go/src/testing/testing.go:1743 +0x314

                                                
                                                
goroutine 5120 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x31f31a0)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:304 +0x258
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 5119
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/util/workqueue/delaying_queue.go:141 +0x200

                                                
                                                
goroutine 5102 [chan receive, 15 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x4000496dc0, 0x400005e380)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 5100
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cache.go:122 +0x48c

                                                
                                                
goroutine 5122 [select]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x31fcba0, 0x400005e380}, 0x4000082740, 0x40012c4f88)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x31fcba0, 0x400005e380}, 0x58?, 0x4000082740, 0x4000082788)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x31fcba0?, 0x400005e380?}, 0x4000171040?, 0x4000171040?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x40013b9da0?, 0x40011be280?, 0x4001842088?)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 5102
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/transport/cert_rotation.go:145 +0x23c

                                                
                                                
goroutine 5134 [select, 15 minutes]:
golang.org/x/net/http2.(*clientStream).writeRequest(0x40013f4300, 0x40000da140, 0x0)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:1532 +0x870
golang.org/x/net/http2.(*clientStream).doRequest(0x40013f4300, 0x8d7b4?, 0x40013b9e00?)
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:1410 +0x58
created by golang.org/x/net/http2.(*ClientConn).roundTrip in goroutine 5133
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:1315 +0x374

                                                
                                                
goroutine 5135 [sync.Cond.Wait, 15 minutes]:
sync.runtime_notifyListWait(0x40013f4348, 0x0)
	/usr/local/go/src/runtime/sema.go:587 +0x154
sync.(*Cond).Wait(0x40013f4338)
	/usr/local/go/src/sync/cond.go:71 +0xcc
golang.org/x/net/http2.(*pipe).Read(0x40013f4330, {0x40016ade00, 0x200, 0x200})
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/pipe.go:76 +0x104
golang.org/x/net/http2.transportResponseBody.Read({0x44761da?}, {0x40016ade00?, 0x4000087538?, 0x59b3b8?})
	/var/lib/jenkins/go/pkg/mod/golang.org/x/net@v0.29.0/http2/transport.go:2637 +0x50
encoding/json.(*Decoder).refill(0x40000da8c0)
	/usr/local/go/src/encoding/json/stream.go:165 +0x164
encoding/json.(*Decoder).readValue(0x40000da8c0)
	/usr/local/go/src/encoding/json/stream.go:140 +0x74
encoding/json.(*Decoder).Decode(0x40000da8c0, {0x1f8bea0, 0x40013f8240})
	/usr/local/go/src/encoding/json/stream.go:63 +0x5c
k8s.io/apimachinery/pkg/util/framer.(*jsonFrameReader).Read(0x4000ad9920, {0x40016ce000, 0x400, 0x400})
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/util/framer/framer.go:151 +0x168
k8s.io/apimachinery/pkg/runtime/serializer/streaming.(*decoder).Decode(0x40000d2280, 0x0, {0x31e2f98, 0x40014ecb40})
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/runtime/serializer/streaming/streaming.go:77 +0x88
k8s.io/client-go/rest/watch.(*Decoder).Decode(0x4001520060)
	/var/lib/jenkins/go/pkg/mod/k8s.io/client-go@v0.31.1/rest/watch/decoder.go:49 +0x5c
k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0x40014ecb00)
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/watch/streamwatcher.go:105 +0xa8
created by k8s.io/apimachinery/pkg/watch.NewStreamWatcher in goroutine 5133
	/var/lib/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.31.1/pkg/watch/streamwatcher.go:76 +0x104

                                                
                                    

Test pass (168/229)

Order passed test Duration
3 TestDownloadOnly/v1.20.0/json-events 7.37
4 TestDownloadOnly/v1.20.0/preload-exists 0
8 TestDownloadOnly/v1.20.0/LogsDuration 0.08
9 TestDownloadOnly/v1.20.0/DeleteAll 26.47
10 TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds 0.13
12 TestDownloadOnly/v1.31.1/json-events 5.03
13 TestDownloadOnly/v1.31.1/preload-exists 0
17 TestDownloadOnly/v1.31.1/LogsDuration 0.09
18 TestDownloadOnly/v1.31.1/DeleteAll 26.49
19 TestDownloadOnly/v1.31.1/DeleteAlwaysSucceeds 0.15
21 TestBinaryMirror 0.57
25 TestAddons/PreSetup/EnablingAddonOnNonExistingCluster 0.07
26 TestAddons/PreSetup/DisablingAddonOnNonExistingCluster 0.07
27 TestAddons/Setup 218.2
35 TestAddons/parallel/InspektorGadget 11.91
40 TestAddons/parallel/Headlamp 16.9
41 TestAddons/parallel/CloudSpanner 6.61
43 TestAddons/parallel/NvidiaDevicePlugin 5.57
44 TestAddons/parallel/Yakd 11.82
45 TestAddons/StoppedEnableDisable 6.42
47 TestCertExpiration 232.45
49 TestForceSystemdFlag 36.77
50 TestForceSystemdEnv 40.83
51 TestDockerEnvContainerd 49.09
56 TestErrorSpam/setup 31.17
57 TestErrorSpam/start 0.74
58 TestErrorSpam/status 1.07
59 TestErrorSpam/pause 2.11
60 TestErrorSpam/unpause 1.95
61 TestErrorSpam/stop 1.48
64 TestFunctional/serial/CopySyncFile 0
65 TestFunctional/serial/StartWithProxy 49.46
66 TestFunctional/serial/AuditLog 0
67 TestFunctional/serial/SoftStart 6.54
72 TestFunctional/serial/CacheCmd/cache/add_remote 4.01
73 TestFunctional/serial/CacheCmd/cache/add_local 1.31
74 TestFunctional/serial/CacheCmd/cache/CacheDelete 0.07
75 TestFunctional/serial/CacheCmd/cache/list 0.07
76 TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node 0.31
77 TestFunctional/serial/CacheCmd/cache/cache_reload 2.11
78 TestFunctional/serial/CacheCmd/cache/delete 0.12
79 TestFunctional/serial/MinikubeKubectlCmd 0.14
80 TestFunctional/serial/MinikubeKubectlCmdDirectly 0.15
81 TestFunctional/serial/ExtraConfig 41.38
83 TestFunctional/serial/LogsCmd 1.84
84 TestFunctional/serial/LogsFileCmd 1.8
87 TestFunctional/parallel/ConfigCmd 0.45
89 TestFunctional/parallel/DryRun 0.67
90 TestFunctional/parallel/InternationalLanguage 0.25
91 TestFunctional/parallel/StatusCmd 1.3
96 TestFunctional/parallel/AddonsCmd 0.16
99 TestFunctional/parallel/SSHCmd 0.69
100 TestFunctional/parallel/CpCmd 2.24
102 TestFunctional/parallel/FileSync 0.32
103 TestFunctional/parallel/CertSync 2.23
109 TestFunctional/parallel/NonActiveRuntimeDisabled 0.85
111 TestFunctional/parallel/License 0.27
113 TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel 0.63
114 TestFunctional/parallel/TunnelCmd/serial/StartTunnel 0
127 TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel 0.11
128 TestFunctional/parallel/ProfileCmd/profile_not_create 0.51
129 TestFunctional/parallel/ProfileCmd/profile_list 0.48
130 TestFunctional/parallel/ProfileCmd/profile_json_output 0.52
132 TestFunctional/parallel/MountCmd/specific-port 2.06
133 TestFunctional/parallel/MountCmd/VerifyCleanup 2.75
134 TestFunctional/parallel/Version/short 0.1
135 TestFunctional/parallel/Version/components 1.3
136 TestFunctional/parallel/ImageCommands/ImageListShort 0.27
137 TestFunctional/parallel/ImageCommands/ImageListTable 0.26
138 TestFunctional/parallel/ImageCommands/ImageListJson 0.3
139 TestFunctional/parallel/ImageCommands/ImageListYaml 0.27
140 TestFunctional/parallel/ImageCommands/ImageBuild 3.65
141 TestFunctional/parallel/ImageCommands/Setup 0.72
142 TestFunctional/parallel/ImageCommands/ImageLoadDaemon 1.9
143 TestFunctional/parallel/ImageCommands/ImageReloadDaemon 1.91
144 TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon 1.74
145 TestFunctional/parallel/ImageCommands/ImageSaveToFile 0.49
146 TestFunctional/parallel/ImageCommands/ImageRemove 0.68
147 TestFunctional/parallel/ImageCommands/ImageLoadFromFile 0.88
148 TestFunctional/parallel/ImageCommands/ImageSaveDaemon 0.5
149 TestFunctional/parallel/UpdateContextCmd/no_changes 0.23
150 TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster 0.16
151 TestFunctional/parallel/UpdateContextCmd/no_clusters 0.18
152 TestFunctional/delete_echo-server_images 0.04
153 TestFunctional/delete_my-image_image 0.02
154 TestFunctional/delete_minikube_cached_images 0.02
158 TestMultiControlPlane/serial/StartCluster 118.73
159 TestMultiControlPlane/serial/DeployApp 33.31
160 TestMultiControlPlane/serial/PingHostFromPods 1.65
161 TestMultiControlPlane/serial/AddWorkerNode 25.74
163 TestMultiControlPlane/serial/HAppyAfterClusterStart 0.78
164 TestMultiControlPlane/serial/CopyFile 19.57
165 TestMultiControlPlane/serial/StopSecondaryNode 12.93
166 TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop 0.6
168 TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart 0.78
169 TestMultiControlPlane/serial/RestartClusterKeepsNodes 141.39
171 TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete 0.57
172 TestMultiControlPlane/serial/StopCluster 36.07
174 TestMultiControlPlane/serial/DegradedAfterClusterRestart 0.54
175 TestMultiControlPlane/serial/AddSecondaryNode 44.85
176 TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd 0.79
180 TestJSONOutput/start/Command 89.03
181 TestJSONOutput/start/Audit 0
183 TestJSONOutput/start/parallel/DistinctCurrentSteps 0
184 TestJSONOutput/start/parallel/IncreasingCurrentSteps 0
186 TestJSONOutput/pause/Command 0.77
187 TestJSONOutput/pause/Audit 0
189 TestJSONOutput/pause/parallel/DistinctCurrentSteps 0
190 TestJSONOutput/pause/parallel/IncreasingCurrentSteps 0
192 TestJSONOutput/unpause/Command 0.69
193 TestJSONOutput/unpause/Audit 0
195 TestJSONOutput/unpause/parallel/DistinctCurrentSteps 0
196 TestJSONOutput/unpause/parallel/IncreasingCurrentSteps 0
198 TestJSONOutput/stop/Command 5.86
199 TestJSONOutput/stop/Audit 0
201 TestJSONOutput/stop/parallel/DistinctCurrentSteps 0
202 TestJSONOutput/stop/parallel/IncreasingCurrentSteps 0
203 TestErrorJSONOutput 0.23
205 TestKicCustomNetwork/create_custom_network 39.77
206 TestKicCustomNetwork/use_default_bridge_network 35.46
207 TestKicExistingNetwork 32.08
208 TestKicCustomSubnet 34.44
209 TestKicStaticIP 36.19
210 TestMainNoArgs 0.05
211 TestMinikubeProfile 65.34
214 TestMountStart/serial/StartWithMountFirst 9.32
215 TestMountStart/serial/VerifyMountFirst 0.25
216 TestMountStart/serial/StartWithMountSecond 6.98
217 TestMountStart/serial/VerifyMountSecond 0.26
218 TestMountStart/serial/DeleteFirst 1.61
219 TestMountStart/serial/VerifyMountPostDelete 0.26
220 TestMountStart/serial/Stop 1.2
221 TestMountStart/serial/RestartStopped 8.13
222 TestMountStart/serial/VerifyMountPostStop 0.26
225 TestMultiNode/serial/FreshStart2Nodes 69.46
226 TestMultiNode/serial/DeployApp2Nodes 17.01
227 TestMultiNode/serial/PingHostFrom2Pods 0.98
228 TestMultiNode/serial/AddNode 18.52
230 TestMultiNode/serial/ProfileList 0.33
231 TestMultiNode/serial/CopyFile 10.63
232 TestMultiNode/serial/StopNode 2.26
234 TestMultiNode/serial/RestartKeepsNodes 128.94
236 TestMultiNode/serial/StopMultiNode 24.21
238 TestMultiNode/serial/ValidateNameConflict 31.59
245 TestScheduledStopUnix 104.98
248 TestInsufficientStorage 10.65
249 TestRunningBinaryUpgrade 82.26
252 TestMissingContainerUpgrade 143.3
254 TestPause/serial/Start 103.54
256 TestNoKubernetes/serial/StartNoK8sWithVersion 0.09
257 TestNoKubernetes/serial/StartWithK8s 44.67
258 TestNoKubernetes/serial/StartWithStopK8s 8.97
259 TestNoKubernetes/serial/Start 7.17
260 TestNoKubernetes/serial/VerifyK8sNotRunning 0.27
261 TestNoKubernetes/serial/ProfileList 0.98
262 TestNoKubernetes/serial/Stop 1.22
263 TestNoKubernetes/serial/StartNoArgs 7.21
264 TestNoKubernetes/serial/VerifyK8sNotRunningSecond 0.31
265 TestPause/serial/SecondStartNoReconfiguration 8.21
266 TestPause/serial/Pause 0.85
267 TestPause/serial/VerifyStatus 0.44
268 TestPause/serial/Unpause 1.06
269 TestPause/serial/PauseAgain 1.07
270 TestPause/serial/DeletePaused 2.81
271 TestPause/serial/VerifyDeletedResources 3.16
272 TestStoppedBinaryUpgrade/Setup 0.61
273 TestStoppedBinaryUpgrade/Upgrade 106.99
274 TestStoppedBinaryUpgrade/MinikubeLogs 1.18
x
+
TestDownloadOnly/v1.20.0/json-events (7.37s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/json-events
aaa_download_only_test.go:81: (dbg) Run:  out/minikube-linux-arm64 start -o=json --download-only -p download-only-911311 --force --alsologtostderr --kubernetes-version=v1.20.0 --container-runtime=containerd --driver=docker  --container-runtime=containerd
aaa_download_only_test.go:81: (dbg) Done: out/minikube-linux-arm64 start -o=json --download-only -p download-only-911311 --force --alsologtostderr --kubernetes-version=v1.20.0 --container-runtime=containerd --driver=docker  --container-runtime=containerd: (7.371721686s)
--- PASS: TestDownloadOnly/v1.20.0/json-events (7.37s)

                                                
                                    
x
+
TestDownloadOnly/v1.20.0/preload-exists (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/preload-exists
--- PASS: TestDownloadOnly/v1.20.0/preload-exists (0.00s)

                                                
                                    
x
+
TestDownloadOnly/v1.20.0/LogsDuration (0.08s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/LogsDuration
aaa_download_only_test.go:184: (dbg) Run:  out/minikube-linux-arm64 logs -p download-only-911311
aaa_download_only_test.go:184: (dbg) Non-zero exit: out/minikube-linux-arm64 logs -p download-only-911311: exit status 85 (76.120734ms)

                                                
                                                
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------|----------------------|---------|---------|---------------------|----------|
	| Command |              Args              |       Profile        |  User   | Version |     Start Time      | End Time |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|----------|
	| start   | -o=json --download-only        | download-only-911311 | jenkins | v1.34.0 | 16 Sep 24 10:28 UTC |          |
	|         | -p download-only-911311        |                      |         |         |                     |          |
	|         | --force --alsologtostderr      |                      |         |         |                     |          |
	|         | --kubernetes-version=v1.20.0   |                      |         |         |                     |          |
	|         | --container-runtime=containerd |                      |         |         |                     |          |
	|         | --driver=docker                |                      |         |         |                     |          |
	|         | --container-runtime=containerd |                      |         |         |                     |          |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|----------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:28:48
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:28:48.445637 2063331 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:28:48.445838 2063331 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:28:48.445866 2063331 out.go:358] Setting ErrFile to fd 2...
	I0916 10:28:48.445884 2063331 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:28:48.446262 2063331 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	W0916 10:28:48.446474 2063331 root.go:314] Error reading config file at /home/jenkins/minikube-integration/19651-2057935/.minikube/config/config.json: open /home/jenkins/minikube-integration/19651-2057935/.minikube/config/config.json: no such file or directory
	I0916 10:28:48.447015 2063331 out.go:352] Setting JSON to true
	I0916 10:28:48.448336 2063331 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":137471,"bootTime":1726345058,"procs":194,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:28:48.448419 2063331 start.go:139] virtualization:  
	I0916 10:28:48.451714 2063331 out.go:97] [download-only-911311] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	W0916 10:28:48.451885 2063331 preload.go:293] Failed to list preload files: open /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball: no such file or directory
	I0916 10:28:48.451982 2063331 notify.go:220] Checking for updates...
	I0916 10:28:48.454177 2063331 out.go:169] MINIKUBE_LOCATION=19651
	I0916 10:28:48.458660 2063331 out.go:169] MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:28:48.460760 2063331 out.go:169] KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:28:48.462539 2063331 out.go:169] MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:28:48.464448 2063331 out.go:169] MINIKUBE_BIN=out/minikube-linux-arm64
	W0916 10:28:48.467996 2063331 out.go:321] minikube skips various validations when --force is supplied; this may lead to unexpected behavior
	I0916 10:28:48.468257 2063331 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:28:48.496029 2063331 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:28:48.496151 2063331 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:28:48.554664 2063331 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:45 OomKillDisable:true NGoroutines:72 SystemTime:2024-09-16 10:28:48.545077522 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:28:48.554797 2063331 docker.go:318] overlay module found
	I0916 10:28:48.556981 2063331 out.go:97] Using the docker driver based on user configuration
	I0916 10:28:48.557011 2063331 start.go:297] selected driver: docker
	I0916 10:28:48.557019 2063331 start.go:901] validating driver "docker" against <nil>
	I0916 10:28:48.557144 2063331 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:28:48.609836 2063331 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:45 OomKillDisable:true NGoroutines:72 SystemTime:2024-09-16 10:28:48.600925475 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:28:48.610056 2063331 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:28:48.610355 2063331 start_flags.go:393] Using suggested 2200MB memory alloc based on sys=7834MB, container=7834MB
	I0916 10:28:48.610520 2063331 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0916 10:28:48.612463 2063331 out.go:169] Using Docker driver with root privileges
	I0916 10:28:48.614415 2063331 cni.go:84] Creating CNI manager for ""
	I0916 10:28:48.614480 2063331 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0916 10:28:48.614498 2063331 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0916 10:28:48.614575 2063331 start.go:340] cluster config:
	{Name:download-only-911311 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.20.0 ClusterName:download-only-911311 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local Co
ntainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:28:48.616368 2063331 out.go:97] Starting "download-only-911311" primary control-plane node in "download-only-911311" cluster
	I0916 10:28:48.616399 2063331 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0916 10:28:48.617791 2063331 out.go:97] Pulling base image v0.0.45-1726358845-19644 ...
	I0916 10:28:48.617813 2063331 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime containerd
	I0916 10:28:48.617978 2063331 image.go:79] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local docker daemon
	I0916 10:28:48.633861 2063331 cache.go:149] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:28:48.634033 2063331 image.go:63] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 in local cache directory
	I0916 10:28:48.634134 2063331 image.go:148] Writing gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 to local cache
	I0916 10:28:48.675081 2063331 preload.go:118] Found remote preload: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.20.0/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4
	I0916 10:28:48.675107 2063331 cache.go:56] Caching tarball of preloaded images
	I0916 10:28:48.675257 2063331 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime containerd
	I0916 10:28:48.677253 2063331 out.go:97] Downloading Kubernetes v1.20.0 preload ...
	I0916 10:28:48.677276 2063331 preload.go:236] getting checksum for preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4 ...
	I0916 10:28:48.760427 2063331 download.go:107] Downloading: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.20.0/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4?checksum=md5:7e3d48ccb9f143791669d02e14ce1643 -> /home/jenkins/minikube-integration/19651-2057935/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4
	I0916 10:28:53.189355 2063331 cache.go:152] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 as a tarball
	
	
	* The control-plane node download-only-911311 host does not exist
	  To start a cluster, run: "minikube start -p download-only-911311"

                                                
                                                
-- /stdout --
aaa_download_only_test.go:185: minikube logs failed with error: exit status 85
--- PASS: TestDownloadOnly/v1.20.0/LogsDuration (0.08s)

                                                
                                    
x
+
TestDownloadOnly/v1.20.0/DeleteAll (26.47s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/DeleteAll
aaa_download_only_test.go:197: (dbg) Run:  out/minikube-linux-arm64 delete --all
aaa_download_only_test.go:197: (dbg) Done: out/minikube-linux-arm64 delete --all: (26.473654726s)
--- PASS: TestDownloadOnly/v1.20.0/DeleteAll (26.47s)

                                                
                                    
x
+
TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds (0.13s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds
aaa_download_only_test.go:208: (dbg) Run:  out/minikube-linux-arm64 delete -p download-only-911311
--- PASS: TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds (0.13s)

                                                
                                    
x
+
TestDownloadOnly/v1.31.1/json-events (5.03s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.31.1/json-events
aaa_download_only_test.go:81: (dbg) Run:  out/minikube-linux-arm64 start -o=json --download-only -p download-only-889126 --force --alsologtostderr --kubernetes-version=v1.31.1 --container-runtime=containerd --driver=docker  --container-runtime=containerd
aaa_download_only_test.go:81: (dbg) Done: out/minikube-linux-arm64 start -o=json --download-only -p download-only-889126 --force --alsologtostderr --kubernetes-version=v1.31.1 --container-runtime=containerd --driver=docker  --container-runtime=containerd: (5.032042043s)
--- PASS: TestDownloadOnly/v1.31.1/json-events (5.03s)

                                                
                                    
x
+
TestDownloadOnly/v1.31.1/preload-exists (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.31.1/preload-exists
--- PASS: TestDownloadOnly/v1.31.1/preload-exists (0.00s)

                                                
                                    
x
+
TestDownloadOnly/v1.31.1/LogsDuration (0.09s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.31.1/LogsDuration
aaa_download_only_test.go:184: (dbg) Run:  out/minikube-linux-arm64 logs -p download-only-889126
aaa_download_only_test.go:184: (dbg) Non-zero exit: out/minikube-linux-arm64 logs -p download-only-889126: exit status 85 (91.057234ms)

                                                
                                                
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| Command |              Args              |       Profile        |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only        | download-only-911311 | jenkins | v1.34.0 | 16 Sep 24 10:28 UTC |                     |
	|         | -p download-only-911311        |                      |         |         |                     |                     |
	|         | --force --alsologtostderr      |                      |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0   |                      |         |         |                     |                     |
	|         | --container-runtime=containerd |                      |         |         |                     |                     |
	|         | --driver=docker                |                      |         |         |                     |                     |
	|         | --container-runtime=containerd |                      |         |         |                     |                     |
	| delete  | --all                          | minikube             | jenkins | v1.34.0 | 16 Sep 24 10:28 UTC | 16 Sep 24 10:29 UTC |
	| delete  | -p download-only-911311        | download-only-911311 | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC | 16 Sep 24 10:29 UTC |
	| start   | -o=json --download-only        | download-only-889126 | jenkins | v1.34.0 | 16 Sep 24 10:29 UTC |                     |
	|         | -p download-only-889126        |                      |         |         |                     |                     |
	|         | --force --alsologtostderr      |                      |         |         |                     |                     |
	|         | --kubernetes-version=v1.31.1   |                      |         |         |                     |                     |
	|         | --container-runtime=containerd |                      |         |         |                     |                     |
	|         | --driver=docker                |                      |         |         |                     |                     |
	|         | --container-runtime=containerd |                      |         |         |                     |                     |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2024/09/16 10:29:22
	Running on machine: ip-172-31-31-251
	Binary: Built with gc go1.23.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0916 10:29:22.497273 2063642 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:29:22.497438 2063642 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:29:22.497449 2063642 out.go:358] Setting ErrFile to fd 2...
	I0916 10:29:22.497455 2063642 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:29:22.497693 2063642 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:29:22.498078 2063642 out.go:352] Setting JSON to true
	I0916 10:29:22.499064 2063642 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":137505,"bootTime":1726345058,"procs":192,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:29:22.499134 2063642 start.go:139] virtualization:  
	I0916 10:29:22.501741 2063642 out.go:97] [download-only-889126] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:29:22.501948 2063642 notify.go:220] Checking for updates...
	I0916 10:29:22.504063 2063642 out.go:169] MINIKUBE_LOCATION=19651
	I0916 10:29:22.505966 2063642 out.go:169] MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:29:22.507692 2063642 out.go:169] KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:29:22.509554 2063642 out.go:169] MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:29:22.511462 2063642 out.go:169] MINIKUBE_BIN=out/minikube-linux-arm64
	W0916 10:29:22.514910 2063642 out.go:321] minikube skips various validations when --force is supplied; this may lead to unexpected behavior
	I0916 10:29:22.515162 2063642 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:29:22.534614 2063642 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:29:22.534743 2063642 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:29:22.589737 2063642 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:47 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:29:22.580412097 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:29:22.589846 2063642 docker.go:318] overlay module found
	I0916 10:29:22.591564 2063642 out.go:97] Using the docker driver based on user configuration
	I0916 10:29:22.591589 2063642 start.go:297] selected driver: docker
	I0916 10:29:22.591596 2063642 start.go:901] validating driver "docker" against <nil>
	I0916 10:29:22.591729 2063642 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:29:22.653373 2063642 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:2 ContainersRunning:2 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:47 OomKillDisable:true NGoroutines:64 SystemTime:2024-09-16 10:29:22.644408613 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:29:22.653598 2063642 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0916 10:29:22.653888 2063642 start_flags.go:393] Using suggested 2200MB memory alloc based on sys=7834MB, container=7834MB
	I0916 10:29:22.654043 2063642 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0916 10:29:22.655864 2063642 out.go:169] Using Docker driver with root privileges
	
	
	* The control-plane node download-only-889126 host does not exist
	  To start a cluster, run: "minikube start -p download-only-889126"

                                                
                                                
-- /stdout --
aaa_download_only_test.go:185: minikube logs failed with error: exit status 85
--- PASS: TestDownloadOnly/v1.31.1/LogsDuration (0.09s)

                                                
                                    
x
+
TestDownloadOnly/v1.31.1/DeleteAll (26.49s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.31.1/DeleteAll
aaa_download_only_test.go:197: (dbg) Run:  out/minikube-linux-arm64 delete --all
aaa_download_only_test.go:197: (dbg) Done: out/minikube-linux-arm64 delete --all: (26.49337891s)
--- PASS: TestDownloadOnly/v1.31.1/DeleteAll (26.49s)

                                                
                                    
x
+
TestDownloadOnly/v1.31.1/DeleteAlwaysSucceeds (0.15s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.31.1/DeleteAlwaysSucceeds
aaa_download_only_test.go:208: (dbg) Run:  out/minikube-linux-arm64 delete -p download-only-889126
--- PASS: TestDownloadOnly/v1.31.1/DeleteAlwaysSucceeds (0.15s)

                                                
                                    
x
+
TestBinaryMirror (0.57s)

                                                
                                                
=== RUN   TestBinaryMirror
aaa_download_only_test.go:314: (dbg) Run:  out/minikube-linux-arm64 start --download-only -p binary-mirror-852743 --alsologtostderr --binary-mirror http://127.0.0.1:35351 --driver=docker  --container-runtime=containerd
helpers_test.go:175: Cleaning up "binary-mirror-852743" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p binary-mirror-852743
--- PASS: TestBinaryMirror (0.57s)

                                                
                                    
x
+
TestAddons/PreSetup/EnablingAddonOnNonExistingCluster (0.07s)

                                                
                                                
=== RUN   TestAddons/PreSetup/EnablingAddonOnNonExistingCluster
=== PAUSE TestAddons/PreSetup/EnablingAddonOnNonExistingCluster

                                                
                                                

                                                
                                                
=== CONT  TestAddons/PreSetup/EnablingAddonOnNonExistingCluster
addons_test.go:1037: (dbg) Run:  out/minikube-linux-arm64 addons enable dashboard -p addons-451841
addons_test.go:1037: (dbg) Non-zero exit: out/minikube-linux-arm64 addons enable dashboard -p addons-451841: exit status 85 (74.739292ms)

                                                
                                                
-- stdout --
	* Profile "addons-451841" not found. Run "minikube profile list" to view all profiles.
	  To start a cluster, run: "minikube start -p addons-451841"

                                                
                                                
-- /stdout --
--- PASS: TestAddons/PreSetup/EnablingAddonOnNonExistingCluster (0.07s)

                                                
                                    
x
+
TestAddons/PreSetup/DisablingAddonOnNonExistingCluster (0.07s)

                                                
                                                
=== RUN   TestAddons/PreSetup/DisablingAddonOnNonExistingCluster
=== PAUSE TestAddons/PreSetup/DisablingAddonOnNonExistingCluster

                                                
                                                

                                                
                                                
=== CONT  TestAddons/PreSetup/DisablingAddonOnNonExistingCluster
addons_test.go:1048: (dbg) Run:  out/minikube-linux-arm64 addons disable dashboard -p addons-451841
addons_test.go:1048: (dbg) Non-zero exit: out/minikube-linux-arm64 addons disable dashboard -p addons-451841: exit status 85 (71.790228ms)

                                                
                                                
-- stdout --
	* Profile "addons-451841" not found. Run "minikube profile list" to view all profiles.
	  To start a cluster, run: "minikube start -p addons-451841"

                                                
                                                
-- /stdout --
--- PASS: TestAddons/PreSetup/DisablingAddonOnNonExistingCluster (0.07s)

                                                
                                    
x
+
TestAddons/Setup (218.2s)

                                                
                                                
=== RUN   TestAddons/Setup
addons_test.go:110: (dbg) Run:  out/minikube-linux-arm64 start -p addons-451841 --wait=true --memory=4000 --alsologtostderr --addons=registry --addons=metrics-server --addons=volumesnapshots --addons=csi-hostpath-driver --addons=gcp-auth --addons=cloud-spanner --addons=inspektor-gadget --addons=storage-provisioner-rancher --addons=nvidia-device-plugin --addons=yakd --addons=volcano --driver=docker  --container-runtime=containerd --addons=ingress --addons=ingress-dns
addons_test.go:110: (dbg) Done: out/minikube-linux-arm64 start -p addons-451841 --wait=true --memory=4000 --alsologtostderr --addons=registry --addons=metrics-server --addons=volumesnapshots --addons=csi-hostpath-driver --addons=gcp-auth --addons=cloud-spanner --addons=inspektor-gadget --addons=storage-provisioner-rancher --addons=nvidia-device-plugin --addons=yakd --addons=volcano --driver=docker  --container-runtime=containerd --addons=ingress --addons=ingress-dns: (3m38.203469047s)
--- PASS: TestAddons/Setup (218.20s)

                                                
                                    
x
+
TestAddons/parallel/InspektorGadget (11.91s)

                                                
                                                
=== RUN   TestAddons/parallel/InspektorGadget
=== PAUSE TestAddons/parallel/InspektorGadget

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/InspektorGadget
addons_test.go:848: (dbg) TestAddons/parallel/InspektorGadget: waiting 8m0s for pods matching "k8s-app=gadget" in namespace "gadget" ...
helpers_test.go:344: "gadget-wjwc2" [6a752659-ec3c-4841-8e83-fd916caaebc2] Running / Ready:ContainersNotReady (containers with unready status: [gadget]) / ContainersReady:ContainersNotReady (containers with unready status: [gadget])
addons_test.go:848: (dbg) TestAddons/parallel/InspektorGadget: k8s-app=gadget healthy within 6.004459735s
addons_test.go:851: (dbg) Run:  out/minikube-linux-arm64 addons disable inspektor-gadget -p addons-451841
addons_test.go:851: (dbg) Done: out/minikube-linux-arm64 addons disable inspektor-gadget -p addons-451841: (5.901139905s)
--- PASS: TestAddons/parallel/InspektorGadget (11.91s)

                                                
                                    
x
+
TestAddons/parallel/Headlamp (16.9s)

                                                
                                                
=== RUN   TestAddons/parallel/Headlamp
=== PAUSE TestAddons/parallel/Headlamp

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Headlamp
addons_test.go:830: (dbg) Run:  out/minikube-linux-arm64 addons enable headlamp -p addons-451841 --alsologtostderr -v=1
addons_test.go:830: (dbg) Done: out/minikube-linux-arm64 addons enable headlamp -p addons-451841 --alsologtostderr -v=1: (1.09906787s)
addons_test.go:835: (dbg) TestAddons/parallel/Headlamp: waiting 8m0s for pods matching "app.kubernetes.io/name=headlamp" in namespace "headlamp" ...
helpers_test.go:344: "headlamp-57fb76fcdb-qwn8v" [3fcd26c0-1de3-44c9-b148-e0521f1ba0f9] Pending / Ready:ContainersNotReady (containers with unready status: [headlamp]) / ContainersReady:ContainersNotReady (containers with unready status: [headlamp])
helpers_test.go:344: "headlamp-57fb76fcdb-qwn8v" [3fcd26c0-1de3-44c9-b148-e0521f1ba0f9] Running
addons_test.go:835: (dbg) TestAddons/parallel/Headlamp: app.kubernetes.io/name=headlamp healthy within 10.00415781s
addons_test.go:839: (dbg) Run:  out/minikube-linux-arm64 -p addons-451841 addons disable headlamp --alsologtostderr -v=1
addons_test.go:839: (dbg) Done: out/minikube-linux-arm64 -p addons-451841 addons disable headlamp --alsologtostderr -v=1: (5.793560636s)
--- PASS: TestAddons/parallel/Headlamp (16.90s)

                                                
                                    
x
+
TestAddons/parallel/CloudSpanner (6.61s)

                                                
                                                
=== RUN   TestAddons/parallel/CloudSpanner
=== PAUSE TestAddons/parallel/CloudSpanner

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/CloudSpanner
addons_test.go:867: (dbg) TestAddons/parallel/CloudSpanner: waiting 6m0s for pods matching "app=cloud-spanner-emulator" in namespace "default" ...
helpers_test.go:344: "cloud-spanner-emulator-769b77f747-m5wld" [03e81c9e-f616-478a-bb93-363bd2db9f93] Running
addons_test.go:867: (dbg) TestAddons/parallel/CloudSpanner: app=cloud-spanner-emulator healthy within 6.003926051s
addons_test.go:870: (dbg) Run:  out/minikube-linux-arm64 addons disable cloud-spanner -p addons-451841
--- PASS: TestAddons/parallel/CloudSpanner (6.61s)

                                                
                                    
x
+
TestAddons/parallel/NvidiaDevicePlugin (5.57s)

                                                
                                                
=== RUN   TestAddons/parallel/NvidiaDevicePlugin
=== PAUSE TestAddons/parallel/NvidiaDevicePlugin

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/NvidiaDevicePlugin
addons_test.go:1061: (dbg) TestAddons/parallel/NvidiaDevicePlugin: waiting 6m0s for pods matching "name=nvidia-device-plugin-ds" in namespace "kube-system" ...
helpers_test.go:344: "nvidia-device-plugin-daemonset-l6r5c" [9645617a-ab29-4c4e-a4ec-4ea217ffb5fb] Running
addons_test.go:1061: (dbg) TestAddons/parallel/NvidiaDevicePlugin: name=nvidia-device-plugin-ds healthy within 5.003785679s
addons_test.go:1064: (dbg) Run:  out/minikube-linux-arm64 addons disable nvidia-device-plugin -p addons-451841
--- PASS: TestAddons/parallel/NvidiaDevicePlugin (5.57s)

                                                
                                    
x
+
TestAddons/parallel/Yakd (11.82s)

                                                
                                                
=== RUN   TestAddons/parallel/Yakd
=== PAUSE TestAddons/parallel/Yakd

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Yakd
addons_test.go:1072: (dbg) TestAddons/parallel/Yakd: waiting 2m0s for pods matching "app.kubernetes.io/name=yakd-dashboard" in namespace "yakd-dashboard" ...
helpers_test.go:344: "yakd-dashboard-67d98fc6b-djh62" [3884eda5-248f-48c2-87cc-ddd844445abb] Running
addons_test.go:1072: (dbg) TestAddons/parallel/Yakd: app.kubernetes.io/name=yakd-dashboard healthy within 6.003369928s
addons_test.go:1076: (dbg) Run:  out/minikube-linux-arm64 -p addons-451841 addons disable yakd --alsologtostderr -v=1
addons_test.go:1076: (dbg) Done: out/minikube-linux-arm64 -p addons-451841 addons disable yakd --alsologtostderr -v=1: (5.816413818s)
--- PASS: TestAddons/parallel/Yakd (11.82s)

                                                
                                    
x
+
TestAddons/StoppedEnableDisable (6.42s)

                                                
                                                
=== RUN   TestAddons/StoppedEnableDisable
addons_test.go:174: (dbg) Run:  out/minikube-linux-arm64 stop -p addons-451841
addons_test.go:174: (dbg) Done: out/minikube-linux-arm64 stop -p addons-451841: (6.143477482s)
addons_test.go:178: (dbg) Run:  out/minikube-linux-arm64 addons enable dashboard -p addons-451841
addons_test.go:182: (dbg) Run:  out/minikube-linux-arm64 addons disable dashboard -p addons-451841
addons_test.go:187: (dbg) Run:  out/minikube-linux-arm64 addons disable gvisor -p addons-451841
--- PASS: TestAddons/StoppedEnableDisable (6.42s)

                                                
                                    
x
+
TestCertExpiration (232.45s)

                                                
                                                
=== RUN   TestCertExpiration
=== PAUSE TestCertExpiration

                                                
                                                

                                                
                                                
=== CONT  TestCertExpiration
cert_options_test.go:123: (dbg) Run:  out/minikube-linux-arm64 start -p cert-expiration-617624 --memory=2048 --cert-expiration=3m --driver=docker  --container-runtime=containerd
E0916 11:23:33.956464 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
cert_options_test.go:123: (dbg) Done: out/minikube-linux-arm64 start -p cert-expiration-617624 --memory=2048 --cert-expiration=3m --driver=docker  --container-runtime=containerd: (41.591271886s)
cert_options_test.go:131: (dbg) Run:  out/minikube-linux-arm64 start -p cert-expiration-617624 --memory=2048 --cert-expiration=8760h --driver=docker  --container-runtime=containerd
cert_options_test.go:131: (dbg) Done: out/minikube-linux-arm64 start -p cert-expiration-617624 --memory=2048 --cert-expiration=8760h --driver=docker  --container-runtime=containerd: (8.555466842s)
helpers_test.go:175: Cleaning up "cert-expiration-617624" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p cert-expiration-617624
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p cert-expiration-617624: (2.302257021s)
--- PASS: TestCertExpiration (232.45s)

                                                
                                    
x
+
TestForceSystemdFlag (36.77s)

                                                
                                                
=== RUN   TestForceSystemdFlag
=== PAUSE TestForceSystemdFlag

                                                
                                                

                                                
                                                
=== CONT  TestForceSystemdFlag
docker_test.go:91: (dbg) Run:  out/minikube-linux-arm64 start -p force-systemd-flag-266994 --memory=2048 --force-systemd --alsologtostderr -v=5 --driver=docker  --container-runtime=containerd
docker_test.go:91: (dbg) Done: out/minikube-linux-arm64 start -p force-systemd-flag-266994 --memory=2048 --force-systemd --alsologtostderr -v=5 --driver=docker  --container-runtime=containerd: (34.284172601s)
docker_test.go:121: (dbg) Run:  out/minikube-linux-arm64 -p force-systemd-flag-266994 ssh "cat /etc/containerd/config.toml"
helpers_test.go:175: Cleaning up "force-systemd-flag-266994" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p force-systemd-flag-266994
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p force-systemd-flag-266994: (2.114932108s)
--- PASS: TestForceSystemdFlag (36.77s)

                                                
                                    
x
+
TestForceSystemdEnv (40.83s)

                                                
                                                
=== RUN   TestForceSystemdEnv
=== PAUSE TestForceSystemdEnv

                                                
                                                

                                                
                                                
=== CONT  TestForceSystemdEnv
docker_test.go:155: (dbg) Run:  out/minikube-linux-arm64 start -p force-systemd-env-836951 --memory=2048 --alsologtostderr -v=5 --driver=docker  --container-runtime=containerd
docker_test.go:155: (dbg) Done: out/minikube-linux-arm64 start -p force-systemd-env-836951 --memory=2048 --alsologtostderr -v=5 --driver=docker  --container-runtime=containerd: (37.806111002s)
docker_test.go:121: (dbg) Run:  out/minikube-linux-arm64 -p force-systemd-env-836951 ssh "cat /etc/containerd/config.toml"
helpers_test.go:175: Cleaning up "force-systemd-env-836951" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p force-systemd-env-836951
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p force-systemd-env-836951: (2.581342593s)
--- PASS: TestForceSystemdEnv (40.83s)

                                                
                                    
x
+
TestDockerEnvContainerd (49.09s)

                                                
                                                
=== RUN   TestDockerEnvContainerd
docker_test.go:170: running with containerd true linux arm64
docker_test.go:181: (dbg) Run:  out/minikube-linux-arm64 start -p dockerenv-701218 --driver=docker  --container-runtime=containerd
docker_test.go:181: (dbg) Done: out/minikube-linux-arm64 start -p dockerenv-701218 --driver=docker  --container-runtime=containerd: (33.618544506s)
docker_test.go:189: (dbg) Run:  /bin/bash -c "out/minikube-linux-arm64 docker-env --ssh-host --ssh-add -p dockerenv-701218"
docker_test.go:189: (dbg) Done: /bin/bash -c "out/minikube-linux-arm64 docker-env --ssh-host --ssh-add -p dockerenv-701218": (1.008210159s)
docker_test.go:220: (dbg) Run:  /bin/bash -c "SSH_AUTH_SOCK="/tmp/ssh-dgucbQ8dDO3a/agent.2080832" SSH_AGENT_PID="2080833" DOCKER_HOST=ssh://docker@127.0.0.1:40582 docker version"
docker_test.go:243: (dbg) Run:  /bin/bash -c "SSH_AUTH_SOCK="/tmp/ssh-dgucbQ8dDO3a/agent.2080832" SSH_AGENT_PID="2080833" DOCKER_HOST=ssh://docker@127.0.0.1:40582 DOCKER_BUILDKIT=0 docker build -t local/minikube-dockerenv-containerd-test:latest testdata/docker-env"
docker_test.go:243: (dbg) Done: /bin/bash -c "SSH_AUTH_SOCK="/tmp/ssh-dgucbQ8dDO3a/agent.2080832" SSH_AGENT_PID="2080833" DOCKER_HOST=ssh://docker@127.0.0.1:40582 DOCKER_BUILDKIT=0 docker build -t local/minikube-dockerenv-containerd-test:latest testdata/docker-env": (1.116697632s)
docker_test.go:250: (dbg) Run:  /bin/bash -c "SSH_AUTH_SOCK="/tmp/ssh-dgucbQ8dDO3a/agent.2080832" SSH_AGENT_PID="2080833" DOCKER_HOST=ssh://docker@127.0.0.1:40582 docker image ls"
helpers_test.go:175: Cleaning up "dockerenv-701218" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p dockerenv-701218
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p dockerenv-701218: (1.92856612s)
--- PASS: TestDockerEnvContainerd (49.09s)

                                                
                                    
x
+
TestErrorSpam/setup (31.17s)

                                                
                                                
=== RUN   TestErrorSpam/setup
error_spam_test.go:81: (dbg) Run:  out/minikube-linux-arm64 start -p nospam-826306 -n=1 --memory=2250 --wait=false --log_dir=/tmp/nospam-826306 --driver=docker  --container-runtime=containerd
error_spam_test.go:81: (dbg) Done: out/minikube-linux-arm64 start -p nospam-826306 -n=1 --memory=2250 --wait=false --log_dir=/tmp/nospam-826306 --driver=docker  --container-runtime=containerd: (31.166798586s)
error_spam_test.go:91: acceptable stderr: "E0916 10:46:57.169123 2081390 start.go:291] kubectl info: exec: fork/exec /usr/local/bin/kubectl: exec format error"
--- PASS: TestErrorSpam/setup (31.17s)

                                                
                                    
x
+
TestErrorSpam/start (0.74s)

                                                
                                                
=== RUN   TestErrorSpam/start
error_spam_test.go:216: Cleaning up 1 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 start --dry-run
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 start --dry-run
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 start --dry-run
--- PASS: TestErrorSpam/start (0.74s)

                                                
                                    
x
+
TestErrorSpam/status (1.07s)

                                                
                                                
=== RUN   TestErrorSpam/status
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 status
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 status
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 status
--- PASS: TestErrorSpam/status (1.07s)

                                                
                                    
x
+
TestErrorSpam/pause (2.11s)

                                                
                                                
=== RUN   TestErrorSpam/pause
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 pause
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 pause
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 pause
--- PASS: TestErrorSpam/pause (2.11s)

                                                
                                    
x
+
TestErrorSpam/unpause (1.95s)

                                                
                                                
=== RUN   TestErrorSpam/unpause
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 unpause
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 unpause
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 unpause
--- PASS: TestErrorSpam/unpause (1.95s)

                                                
                                    
x
+
TestErrorSpam/stop (1.48s)

                                                
                                                
=== RUN   TestErrorSpam/stop
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 stop
error_spam_test.go:159: (dbg) Done: out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 stop: (1.287480991s)
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 stop
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-826306 --log_dir /tmp/nospam-826306 stop
--- PASS: TestErrorSpam/stop (1.48s)

                                                
                                    
x
+
TestFunctional/serial/CopySyncFile (0s)

                                                
                                                
=== RUN   TestFunctional/serial/CopySyncFile
functional_test.go:1855: local sync path: /home/jenkins/minikube-integration/19651-2057935/.minikube/files/etc/test/nested/copy/2063326/hosts
--- PASS: TestFunctional/serial/CopySyncFile (0.00s)

                                                
                                    
x
+
TestFunctional/serial/StartWithProxy (49.46s)

                                                
                                                
=== RUN   TestFunctional/serial/StartWithProxy
functional_test.go:2234: (dbg) Run:  out/minikube-linux-arm64 start -p functional-911502 --memory=4000 --apiserver-port=8441 --wait=all --driver=docker  --container-runtime=containerd
functional_test.go:2234: (dbg) Done: out/minikube-linux-arm64 start -p functional-911502 --memory=4000 --apiserver-port=8441 --wait=all --driver=docker  --container-runtime=containerd: (49.461634175s)
--- PASS: TestFunctional/serial/StartWithProxy (49.46s)

                                                
                                    
x
+
TestFunctional/serial/AuditLog (0s)

                                                
                                                
=== RUN   TestFunctional/serial/AuditLog
--- PASS: TestFunctional/serial/AuditLog (0.00s)

                                                
                                    
x
+
TestFunctional/serial/SoftStart (6.54s)

                                                
                                                
=== RUN   TestFunctional/serial/SoftStart
functional_test.go:659: (dbg) Run:  out/minikube-linux-arm64 start -p functional-911502 --alsologtostderr -v=8
functional_test.go:659: (dbg) Done: out/minikube-linux-arm64 start -p functional-911502 --alsologtostderr -v=8: (6.538838583s)
functional_test.go:663: soft start took 6.541947461s for "functional-911502" cluster.
--- PASS: TestFunctional/serial/SoftStart (6.54s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/add_remote (4.01s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/add_remote
functional_test.go:1049: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 cache add registry.k8s.io/pause:3.1
functional_test.go:1049: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 cache add registry.k8s.io/pause:3.1: (1.508637814s)
functional_test.go:1049: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 cache add registry.k8s.io/pause:3.3
functional_test.go:1049: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 cache add registry.k8s.io/pause:3.3: (1.283581656s)
functional_test.go:1049: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 cache add registry.k8s.io/pause:latest
functional_test.go:1049: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 cache add registry.k8s.io/pause:latest: (1.217469977s)
--- PASS: TestFunctional/serial/CacheCmd/cache/add_remote (4.01s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/add_local (1.31s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/add_local
functional_test.go:1077: (dbg) Run:  docker build -t minikube-local-cache-test:functional-911502 /tmp/TestFunctionalserialCacheCmdcacheadd_local2367358936/001
functional_test.go:1089: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 cache add minikube-local-cache-test:functional-911502
functional_test.go:1094: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 cache delete minikube-local-cache-test:functional-911502
functional_test.go:1083: (dbg) Run:  docker rmi minikube-local-cache-test:functional-911502
--- PASS: TestFunctional/serial/CacheCmd/cache/add_local (1.31s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/CacheDelete (0.07s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/CacheDelete
functional_test.go:1102: (dbg) Run:  out/minikube-linux-arm64 cache delete registry.k8s.io/pause:3.3
--- PASS: TestFunctional/serial/CacheCmd/cache/CacheDelete (0.07s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/list (0.07s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/list
functional_test.go:1110: (dbg) Run:  out/minikube-linux-arm64 cache list
--- PASS: TestFunctional/serial/CacheCmd/cache/list (0.07s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node (0.31s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node
functional_test.go:1124: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh sudo crictl images
--- PASS: TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node (0.31s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/cache_reload (2.11s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/cache_reload
functional_test.go:1147: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh sudo crictl rmi registry.k8s.io/pause:latest
functional_test.go:1153: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh sudo crictl inspecti registry.k8s.io/pause:latest
functional_test.go:1153: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 ssh sudo crictl inspecti registry.k8s.io/pause:latest: exit status 1 (309.15952ms)

                                                
                                                
-- stdout --
	FATA[0000] no such image "registry.k8s.io/pause:latest" present 

                                                
                                                
-- /stdout --
** stderr ** 
	ssh: Process exited with status 1

                                                
                                                
** /stderr **
functional_test.go:1158: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 cache reload
functional_test.go:1158: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 cache reload: (1.16730301s)
functional_test.go:1163: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh sudo crictl inspecti registry.k8s.io/pause:latest
--- PASS: TestFunctional/serial/CacheCmd/cache/cache_reload (2.11s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/delete (0.12s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/delete
functional_test.go:1172: (dbg) Run:  out/minikube-linux-arm64 cache delete registry.k8s.io/pause:3.1
functional_test.go:1172: (dbg) Run:  out/minikube-linux-arm64 cache delete registry.k8s.io/pause:latest
--- PASS: TestFunctional/serial/CacheCmd/cache/delete (0.12s)

                                                
                                    
x
+
TestFunctional/serial/MinikubeKubectlCmd (0.14s)

                                                
                                                
=== RUN   TestFunctional/serial/MinikubeKubectlCmd
functional_test.go:716: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 kubectl -- --context functional-911502 get pods
--- PASS: TestFunctional/serial/MinikubeKubectlCmd (0.14s)

                                                
                                    
x
+
TestFunctional/serial/MinikubeKubectlCmdDirectly (0.15s)

                                                
                                                
=== RUN   TestFunctional/serial/MinikubeKubectlCmdDirectly
functional_test.go:741: (dbg) Run:  out/kubectl --context functional-911502 get pods
--- PASS: TestFunctional/serial/MinikubeKubectlCmdDirectly (0.15s)

                                                
                                    
x
+
TestFunctional/serial/ExtraConfig (41.38s)

                                                
                                                
=== RUN   TestFunctional/serial/ExtraConfig
functional_test.go:757: (dbg) Run:  out/minikube-linux-arm64 start -p functional-911502 --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all
E0916 10:48:33.957380 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:33.967280 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:33.978759 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:34.000250 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:34.043140 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:34.124753 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:34.286360 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:34.608115 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:35.249899 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:36.531343 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:39.093559 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:44.215158 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:48:54.457045 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
functional_test.go:757: (dbg) Done: out/minikube-linux-arm64 start -p functional-911502 --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all: (41.379820957s)
functional_test.go:761: restart took 41.379937952s for "functional-911502" cluster.
--- PASS: TestFunctional/serial/ExtraConfig (41.38s)

                                                
                                    
x
+
TestFunctional/serial/LogsCmd (1.84s)

                                                
                                                
=== RUN   TestFunctional/serial/LogsCmd
functional_test.go:1236: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 logs
functional_test.go:1236: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 logs: (1.839626787s)
--- PASS: TestFunctional/serial/LogsCmd (1.84s)

                                                
                                    
x
+
TestFunctional/serial/LogsFileCmd (1.8s)

                                                
                                                
=== RUN   TestFunctional/serial/LogsFileCmd
functional_test.go:1250: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 logs --file /tmp/TestFunctionalserialLogsFileCmd3954237537/001/logs.txt
functional_test.go:1250: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 logs --file /tmp/TestFunctionalserialLogsFileCmd3954237537/001/logs.txt: (1.796935382s)
--- PASS: TestFunctional/serial/LogsFileCmd (1.80s)

                                                
                                    
x
+
TestFunctional/parallel/ConfigCmd (0.45s)

                                                
                                                
=== RUN   TestFunctional/parallel/ConfigCmd
=== PAUSE TestFunctional/parallel/ConfigCmd

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ConfigCmd
functional_test.go:1199: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 config unset cpus
functional_test.go:1199: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 config get cpus
functional_test.go:1199: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 config get cpus: exit status 14 (81.264234ms)

                                                
                                                
** stderr ** 
	Error: specified key could not be found in config

                                                
                                                
** /stderr **
functional_test.go:1199: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 config set cpus 2
functional_test.go:1199: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 config get cpus
functional_test.go:1199: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 config unset cpus
functional_test.go:1199: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 config get cpus
functional_test.go:1199: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 config get cpus: exit status 14 (54.463074ms)

                                                
                                                
** stderr ** 
	Error: specified key could not be found in config

                                                
                                                
** /stderr **
--- PASS: TestFunctional/parallel/ConfigCmd (0.45s)

                                                
                                    
x
+
TestFunctional/parallel/DryRun (0.67s)

                                                
                                                
=== RUN   TestFunctional/parallel/DryRun
=== PAUSE TestFunctional/parallel/DryRun

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/DryRun
functional_test.go:974: (dbg) Run:  out/minikube-linux-arm64 start -p functional-911502 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=containerd
functional_test.go:974: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p functional-911502 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=containerd: exit status 23 (271.365973ms)

                                                
                                                
-- stdout --
	* [functional-911502] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=19651
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	* Using the docker driver based on existing profile
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0916 10:50:59.112267 2096704 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:50:59.112479 2096704 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:50:59.112507 2096704 out.go:358] Setting ErrFile to fd 2...
	I0916 10:50:59.112525 2096704 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:50:59.112807 2096704 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:50:59.113213 2096704 out.go:352] Setting JSON to false
	I0916 10:50:59.114315 2096704 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":138801,"bootTime":1726345058,"procs":204,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:50:59.114457 2096704 start.go:139] virtualization:  
	I0916 10:50:59.117791 2096704 out.go:177] * [functional-911502] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	I0916 10:50:59.120823 2096704 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:50:59.120909 2096704 notify.go:220] Checking for updates...
	I0916 10:50:59.126231 2096704 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:50:59.128825 2096704 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:50:59.131405 2096704 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:50:59.134058 2096704 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:50:59.136948 2096704 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:50:59.140271 2096704 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:50:59.140837 2096704 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:50:59.173699 2096704 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:50:59.173814 2096704 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:50:59.308995 2096704 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:49 OomKillDisable:true NGoroutines:72 SystemTime:2024-09-16 10:50:59.294466542 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:50:59.309103 2096704 docker.go:318] overlay module found
	I0916 10:50:59.314035 2096704 out.go:177] * Using the docker driver based on existing profile
	I0916 10:50:59.316619 2096704 start.go:297] selected driver: docker
	I0916 10:50:59.316646 2096704 start.go:901] validating driver "docker" against &{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:do
cker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:50:59.316780 2096704 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:50:59.320148 2096704 out.go:201] 
	W0916 10:50:59.322909 2096704 out.go:270] X Exiting due to RSRC_INSUFFICIENT_REQ_MEMORY: Requested memory allocation 250MiB is less than the usable minimum of 1800MB
	X Exiting due to RSRC_INSUFFICIENT_REQ_MEMORY: Requested memory allocation 250MiB is less than the usable minimum of 1800MB
	I0916 10:50:59.325575 2096704 out.go:201] 

                                                
                                                
** /stderr **
functional_test.go:991: (dbg) Run:  out/minikube-linux-arm64 start -p functional-911502 --dry-run --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
--- PASS: TestFunctional/parallel/DryRun (0.67s)

                                                
                                    
x
+
TestFunctional/parallel/InternationalLanguage (0.25s)

                                                
                                                
=== RUN   TestFunctional/parallel/InternationalLanguage
=== PAUSE TestFunctional/parallel/InternationalLanguage

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/InternationalLanguage
functional_test.go:1020: (dbg) Run:  out/minikube-linux-arm64 start -p functional-911502 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=containerd
functional_test.go:1020: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p functional-911502 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=containerd: exit status 23 (250.785582ms)

                                                
                                                
-- stdout --
	* [functional-911502] minikube v1.34.0 sur Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=19651
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	* Utilisation du pilote docker basé sur le profil existant
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0916 10:50:58.900143 2096620 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:50:58.900305 2096620 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:50:58.900312 2096620 out.go:358] Setting ErrFile to fd 2...
	I0916 10:50:58.900317 2096620 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:50:58.901602 2096620 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:50:58.902035 2096620 out.go:352] Setting JSON to false
	I0916 10:50:58.903690 2096620 start.go:129] hostinfo: {"hostname":"ip-172-31-31-251","uptime":138801,"bootTime":1726345058,"procs":208,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1069-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"982e3628-3742-4b3e-bb63-ac1b07660ec7"}
	I0916 10:50:58.903773 2096620 start.go:139] virtualization:  
	I0916 10:50:58.907193 2096620 out.go:177] * [functional-911502] minikube v1.34.0 sur Ubuntu 20.04 (arm64)
	I0916 10:50:58.911313 2096620 out.go:177]   - MINIKUBE_LOCATION=19651
	I0916 10:50:58.911426 2096620 notify.go:220] Checking for updates...
	I0916 10:50:58.917260 2096620 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0916 10:50:58.920012 2096620 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	I0916 10:50:58.922483 2096620 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	I0916 10:50:58.925299 2096620 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0916 10:50:58.928022 2096620 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0916 10:50:58.931125 2096620 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:50:58.931715 2096620 driver.go:394] Setting default libvirt URI to qemu:///system
	I0916 10:50:58.959737 2096620 docker.go:123] docker version: linux-27.2.1:Docker Engine - Community
	I0916 10:50:58.959868 2096620 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:50:59.039889 2096620 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:3 ContainersRunning:3 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:48 OomKillDisable:true NGoroutines:71 SystemTime:2024-09-16 10:50:59.02655159 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarc
h64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerError
s:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:50:59.039998 2096620 docker.go:318] overlay module found
	I0916 10:50:59.042899 2096620 out.go:177] * Utilisation du pilote docker basé sur le profil existant
	I0916 10:50:59.045646 2096620 start.go:297] selected driver: docker
	I0916 10:50:59.045668 2096620 start.go:901] validating driver "docker" against &{Name:functional-911502 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.45-1726358845-19644@sha256:4c67a32a16c2d4f824f00267c172fd225757ca75441e363d925dc9583137f0b0 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.31.1 ClusterName:functional-911502 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.31.1 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:do
cker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0916 10:50:59.045793 2096620 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0916 10:50:59.049030 2096620 out.go:201] 
	W0916 10:50:59.051529 2096620 out.go:270] X Fermeture en raison de RSRC_INSUFFICIENT_REQ_MEMORY : L'allocation de mémoire demandée 250 Mio est inférieure au minimum utilisable de 1800 Mo
	X Fermeture en raison de RSRC_INSUFFICIENT_REQ_MEMORY : L'allocation de mémoire demandée 250 Mio est inférieure au minimum utilisable de 1800 Mo
	I0916 10:50:59.054306 2096620 out.go:201] 

                                                
                                                
** /stderr **
--- PASS: TestFunctional/parallel/InternationalLanguage (0.25s)

                                                
                                    
x
+
TestFunctional/parallel/StatusCmd (1.3s)

                                                
                                                
=== RUN   TestFunctional/parallel/StatusCmd
=== PAUSE TestFunctional/parallel/StatusCmd

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/StatusCmd
functional_test.go:854: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 status
functional_test.go:860: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 status -f host:{{.Host}},kublet:{{.Kubelet}},apiserver:{{.APIServer}},kubeconfig:{{.Kubeconfig}}
functional_test.go:872: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 status -o json
--- PASS: TestFunctional/parallel/StatusCmd (1.30s)

                                                
                                    
x
+
TestFunctional/parallel/AddonsCmd (0.16s)

                                                
                                                
=== RUN   TestFunctional/parallel/AddonsCmd
=== PAUSE TestFunctional/parallel/AddonsCmd

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/AddonsCmd
functional_test.go:1690: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 addons list
functional_test.go:1702: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 addons list -o json
--- PASS: TestFunctional/parallel/AddonsCmd (0.16s)

                                                
                                    
x
+
TestFunctional/parallel/SSHCmd (0.69s)

                                                
                                                
=== RUN   TestFunctional/parallel/SSHCmd
=== PAUSE TestFunctional/parallel/SSHCmd

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/SSHCmd
functional_test.go:1725: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "echo hello"
functional_test.go:1742: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "cat /etc/hostname"
--- PASS: TestFunctional/parallel/SSHCmd (0.69s)

                                                
                                    
x
+
TestFunctional/parallel/CpCmd (2.24s)

                                                
                                                
=== RUN   TestFunctional/parallel/CpCmd
=== PAUSE TestFunctional/parallel/CpCmd

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/CpCmd
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 cp testdata/cp-test.txt /home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh -n functional-911502 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 cp functional-911502:/home/docker/cp-test.txt /tmp/TestFunctionalparallelCpCmd3382788966/001/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh -n functional-911502 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 cp testdata/cp-test.txt /tmp/does/not/exist/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh -n functional-911502 "sudo cat /tmp/does/not/exist/cp-test.txt"
--- PASS: TestFunctional/parallel/CpCmd (2.24s)

                                                
                                    
x
+
TestFunctional/parallel/FileSync (0.32s)

                                                
                                                
=== RUN   TestFunctional/parallel/FileSync
=== PAUSE TestFunctional/parallel/FileSync

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/FileSync
functional_test.go:1929: Checking for existence of /etc/test/nested/copy/2063326/hosts within VM
functional_test.go:1931: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "sudo cat /etc/test/nested/copy/2063326/hosts"
functional_test.go:1936: file sync test content: Test file for checking file sync process
--- PASS: TestFunctional/parallel/FileSync (0.32s)

                                                
                                    
x
+
TestFunctional/parallel/CertSync (2.23s)

                                                
                                                
=== RUN   TestFunctional/parallel/CertSync
=== PAUSE TestFunctional/parallel/CertSync

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/CertSync
functional_test.go:1972: Checking for existence of /etc/ssl/certs/2063326.pem within VM
functional_test.go:1973: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "sudo cat /etc/ssl/certs/2063326.pem"
functional_test.go:1972: Checking for existence of /usr/share/ca-certificates/2063326.pem within VM
functional_test.go:1973: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "sudo cat /usr/share/ca-certificates/2063326.pem"
functional_test.go:1972: Checking for existence of /etc/ssl/certs/51391683.0 within VM
functional_test.go:1973: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "sudo cat /etc/ssl/certs/51391683.0"
functional_test.go:1999: Checking for existence of /etc/ssl/certs/20633262.pem within VM
functional_test.go:2000: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "sudo cat /etc/ssl/certs/20633262.pem"
functional_test.go:1999: Checking for existence of /usr/share/ca-certificates/20633262.pem within VM
functional_test.go:2000: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "sudo cat /usr/share/ca-certificates/20633262.pem"
functional_test.go:1999: Checking for existence of /etc/ssl/certs/3ec20f2e.0 within VM
functional_test.go:2000: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "sudo cat /etc/ssl/certs/3ec20f2e.0"
--- PASS: TestFunctional/parallel/CertSync (2.23s)

                                                
                                    
x
+
TestFunctional/parallel/NonActiveRuntimeDisabled (0.85s)

                                                
                                                
=== RUN   TestFunctional/parallel/NonActiveRuntimeDisabled
=== PAUSE TestFunctional/parallel/NonActiveRuntimeDisabled

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/NonActiveRuntimeDisabled
functional_test.go:2027: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "sudo systemctl is-active docker"
functional_test.go:2027: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 ssh "sudo systemctl is-active docker": exit status 1 (406.204151ms)

                                                
                                                
-- stdout --
	inactive

                                                
                                                
-- /stdout --
** stderr ** 
	ssh: Process exited with status 3

                                                
                                                
** /stderr **
functional_test.go:2027: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "sudo systemctl is-active crio"
functional_test.go:2027: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 ssh "sudo systemctl is-active crio": exit status 1 (446.741881ms)

                                                
                                                
-- stdout --
	inactive

                                                
                                                
-- /stdout --
** stderr ** 
	ssh: Process exited with status 3

                                                
                                                
** /stderr **
--- PASS: TestFunctional/parallel/NonActiveRuntimeDisabled (0.85s)

                                                
                                    
x
+
TestFunctional/parallel/License (0.27s)

                                                
                                                
=== RUN   TestFunctional/parallel/License
=== PAUSE TestFunctional/parallel/License

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/License
functional_test.go:2288: (dbg) Run:  out/minikube-linux-arm64 license
--- PASS: TestFunctional/parallel/License (0.27s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel (0.63s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel
functional_test_tunnel_test.go:154: (dbg) daemon: [out/minikube-linux-arm64 -p functional-911502 tunnel --alsologtostderr]
functional_test_tunnel_test.go:154: (dbg) daemon: [out/minikube-linux-arm64 -p functional-911502 tunnel --alsologtostderr]
functional_test_tunnel_test.go:194: (dbg) stopping [out/minikube-linux-arm64 -p functional-911502 tunnel --alsologtostderr] ...
helpers_test.go:508: unable to kill pid 2093630: os: process already finished
functional_test_tunnel_test.go:194: (dbg) stopping [out/minikube-linux-arm64 -p functional-911502 tunnel --alsologtostderr] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
--- PASS: TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel (0.63s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/StartTunnel (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/StartTunnel
functional_test_tunnel_test.go:129: (dbg) daemon: [out/minikube-linux-arm64 -p functional-911502 tunnel --alsologtostderr]
--- PASS: TestFunctional/parallel/TunnelCmd/serial/StartTunnel (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel (0.11s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel
functional_test_tunnel_test.go:434: (dbg) stopping [out/minikube-linux-arm64 -p functional-911502 tunnel --alsologtostderr] ...
--- PASS: TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel (0.11s)

                                                
                                    
x
+
TestFunctional/parallel/ProfileCmd/profile_not_create (0.51s)

                                                
                                                
=== RUN   TestFunctional/parallel/ProfileCmd/profile_not_create
functional_test.go:1270: (dbg) Run:  out/minikube-linux-arm64 profile lis
functional_test.go:1275: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestFunctional/parallel/ProfileCmd/profile_not_create (0.51s)

                                                
                                    
x
+
TestFunctional/parallel/ProfileCmd/profile_list (0.48s)

                                                
                                                
=== RUN   TestFunctional/parallel/ProfileCmd/profile_list
functional_test.go:1310: (dbg) Run:  out/minikube-linux-arm64 profile list
functional_test.go:1315: Took "412.808096ms" to run "out/minikube-linux-arm64 profile list"
functional_test.go:1324: (dbg) Run:  out/minikube-linux-arm64 profile list -l
functional_test.go:1329: Took "71.890723ms" to run "out/minikube-linux-arm64 profile list -l"
--- PASS: TestFunctional/parallel/ProfileCmd/profile_list (0.48s)

                                                
                                    
x
+
TestFunctional/parallel/ProfileCmd/profile_json_output (0.52s)

                                                
                                                
=== RUN   TestFunctional/parallel/ProfileCmd/profile_json_output
functional_test.go:1361: (dbg) Run:  out/minikube-linux-arm64 profile list -o json
functional_test.go:1366: Took "432.358167ms" to run "out/minikube-linux-arm64 profile list -o json"
functional_test.go:1374: (dbg) Run:  out/minikube-linux-arm64 profile list -o json --light
functional_test.go:1379: Took "86.822224ms" to run "out/minikube-linux-arm64 profile list -o json --light"
--- PASS: TestFunctional/parallel/ProfileCmd/profile_json_output (0.52s)

                                                
                                    
x
+
TestFunctional/parallel/MountCmd/specific-port (2.06s)

                                                
                                                
=== RUN   TestFunctional/parallel/MountCmd/specific-port
functional_test_mount_test.go:213: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-911502 /tmp/TestFunctionalparallelMountCmdspecific-port819290008/001:/mount-9p --alsologtostderr -v=1 --port 46464]
functional_test_mount_test.go:243: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:243: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 ssh "findmnt -T /mount-9p | grep 9p": exit status 1 (425.422367ms)

                                                
                                                
** stderr ** 
	ssh: Process exited with status 1

                                                
                                                
** /stderr **
functional_test_mount_test.go:243: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:257: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh -- ls -la /mount-9p
functional_test_mount_test.go:261: guest mount directory contents
total 0
functional_test_mount_test.go:263: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-911502 /tmp/TestFunctionalparallelMountCmdspecific-port819290008/001:/mount-9p --alsologtostderr -v=1 --port 46464] ...
functional_test_mount_test.go:264: reading mount text
functional_test_mount_test.go:278: done reading mount text
functional_test_mount_test.go:230: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "sudo umount -f /mount-9p"
functional_test_mount_test.go:230: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 ssh "sudo umount -f /mount-9p": exit status 1 (337.949482ms)

                                                
                                                
-- stdout --
	umount: /mount-9p: not mounted.

                                                
                                                
-- /stdout --
** stderr ** 
	ssh: Process exited with status 32

                                                
                                                
** /stderr **
functional_test_mount_test.go:232: "out/minikube-linux-arm64 -p functional-911502 ssh \"sudo umount -f /mount-9p\"": exit status 1
functional_test_mount_test.go:234: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-911502 /tmp/TestFunctionalparallelMountCmdspecific-port819290008/001:/mount-9p --alsologtostderr -v=1 --port 46464] ...
--- PASS: TestFunctional/parallel/MountCmd/specific-port (2.06s)

                                                
                                    
x
+
TestFunctional/parallel/MountCmd/VerifyCleanup (2.75s)

                                                
                                                
=== RUN   TestFunctional/parallel/MountCmd/VerifyCleanup
functional_test_mount_test.go:298: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-911502 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3293230860/001:/mount1 --alsologtostderr -v=1]
functional_test_mount_test.go:298: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-911502 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3293230860/001:/mount2 --alsologtostderr -v=1]
functional_test_mount_test.go:298: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-911502 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3293230860/001:/mount3 --alsologtostderr -v=1]
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "findmnt -T" /mount1
functional_test_mount_test.go:325: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 ssh "findmnt -T" /mount1: exit status 1 (865.03265ms)

                                                
                                                
** stderr ** 
	ssh: Process exited with status 1

                                                
                                                
** /stderr **
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "findmnt -T" /mount1
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "findmnt -T" /mount2
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh "findmnt -T" /mount3
functional_test_mount_test.go:370: (dbg) Run:  out/minikube-linux-arm64 mount -p functional-911502 --kill=true
functional_test_mount_test.go:313: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-911502 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3293230860/001:/mount1 --alsologtostderr -v=1] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
functional_test_mount_test.go:313: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-911502 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3293230860/001:/mount2 --alsologtostderr -v=1] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
functional_test_mount_test.go:313: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-911502 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3293230860/001:/mount3 --alsologtostderr -v=1] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
--- PASS: TestFunctional/parallel/MountCmd/VerifyCleanup (2.75s)

                                                
                                    
x
+
TestFunctional/parallel/Version/short (0.1s)

                                                
                                                
=== RUN   TestFunctional/parallel/Version/short
=== PAUSE TestFunctional/parallel/Version/short

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/Version/short
functional_test.go:2256: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 version --short
--- PASS: TestFunctional/parallel/Version/short (0.10s)

                                                
                                    
x
+
TestFunctional/parallel/Version/components (1.3s)

                                                
                                                
=== RUN   TestFunctional/parallel/Version/components
=== PAUSE TestFunctional/parallel/Version/components

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/Version/components
functional_test.go:2270: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 version -o=json --components
functional_test.go:2270: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 version -o=json --components: (1.299106898s)
--- PASS: TestFunctional/parallel/Version/components (1.30s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageListShort (0.27s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageListShort
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListShort

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ImageCommands/ImageListShort
functional_test.go:261: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image ls --format short --alsologtostderr
functional_test.go:266: (dbg) Stdout: out/minikube-linux-arm64 -p functional-911502 image ls --format short --alsologtostderr:
registry.k8s.io/pause:latest
registry.k8s.io/pause:3.3
registry.k8s.io/pause:3.10
registry.k8s.io/pause:3.1
registry.k8s.io/kube-scheduler:v1.31.1
registry.k8s.io/kube-proxy:v1.31.1
registry.k8s.io/kube-controller-manager:v1.31.1
registry.k8s.io/kube-apiserver:v1.31.1
registry.k8s.io/etcd:3.5.15-0
registry.k8s.io/coredns/coredns:v1.11.3
gcr.io/k8s-minikube/storage-provisioner:v5
docker.io/library/minikube-local-cache-test:functional-911502
docker.io/kindest/kindnetd:v20240813-c6f155d6
docker.io/kicbase/echo-server:functional-911502
functional_test.go:269: (dbg) Stderr: out/minikube-linux-arm64 -p functional-911502 image ls --format short --alsologtostderr:
I0916 10:51:14.549057 2100049 out.go:345] Setting OutFile to fd 1 ...
I0916 10:51:14.549334 2100049 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:14.549371 2100049 out.go:358] Setting ErrFile to fd 2...
I0916 10:51:14.549393 2100049 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:14.549707 2100049 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
I0916 10:51:14.550639 2100049 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
I0916 10:51:14.550864 2100049 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
I0916 10:51:14.551614 2100049 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
I0916 10:51:14.572762 2100049 ssh_runner.go:195] Run: systemctl --version
I0916 10:51:14.572817 2100049 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
I0916 10:51:14.598551 2100049 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
I0916 10:51:14.699918 2100049 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListShort (0.27s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageListTable (0.26s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageListTable
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListTable

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ImageCommands/ImageListTable
functional_test.go:261: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image ls --format table --alsologtostderr
functional_test.go:266: (dbg) Stdout: out/minikube-linux-arm64 -p functional-911502 image ls --format table --alsologtostderr:
|---------------------------------------------|--------------------|---------------|--------|
|                    Image                    |        Tag         |   Image ID    |  Size  |
|---------------------------------------------|--------------------|---------------|--------|
| docker.io/library/minikube-local-cache-test | functional-911502  | sha256:201a68 | 991B   |
| docker.io/kicbase/echo-server               | functional-911502  | sha256:ce2d2c | 2.17MB |
| gcr.io/k8s-minikube/storage-provisioner     | v5                 | sha256:ba04bb | 8.03MB |
| registry.k8s.io/coredns/coredns             | v1.11.3            | sha256:2f6c96 | 16.9MB |
| registry.k8s.io/kube-controller-manager     | v1.31.1            | sha256:279f38 | 23.9MB |
| registry.k8s.io/kube-proxy                  | v1.31.1            | sha256:24a140 | 26.8MB |
| registry.k8s.io/pause                       | latest             | sha256:8cb209 | 71.3kB |
| registry.k8s.io/pause                       | 3.1                | sha256:8057e0 | 262kB  |
| docker.io/kindest/kindnetd                  | v20240813-c6f155d6 | sha256:6a23fa | 33.3MB |
| registry.k8s.io/kube-apiserver              | v1.31.1            | sha256:d3f53a | 25.7MB |
| registry.k8s.io/pause                       | 3.10               | sha256:afb617 | 268kB  |
| registry.k8s.io/pause                       | 3.3                | sha256:3d1873 | 249kB  |
| registry.k8s.io/etcd                        | 3.5.15-0           | sha256:27e383 | 66.5MB |
| registry.k8s.io/kube-scheduler              | v1.31.1            | sha256:7f8aa3 | 18.5MB |
|---------------------------------------------|--------------------|---------------|--------|
functional_test.go:269: (dbg) Stderr: out/minikube-linux-arm64 -p functional-911502 image ls --format table --alsologtostderr:
I0916 10:51:15.573537 2100269 out.go:345] Setting OutFile to fd 1 ...
I0916 10:51:15.573741 2100269 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:15.573767 2100269 out.go:358] Setting ErrFile to fd 2...
I0916 10:51:15.573787 2100269 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:15.574091 2100269 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
I0916 10:51:15.574859 2100269 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
I0916 10:51:15.575990 2100269 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
I0916 10:51:15.576529 2100269 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
I0916 10:51:15.594589 2100269 ssh_runner.go:195] Run: systemctl --version
I0916 10:51:15.594671 2100269 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
I0916 10:51:15.611671 2100269 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
I0916 10:51:15.707625 2100269 ssh_runner.go:195] Run: sudo crictl images --output json
E0916 10:51:17.823686 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
--- PASS: TestFunctional/parallel/ImageCommands/ImageListTable (0.26s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageListJson (0.3s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageListJson
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListJson

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ImageCommands/ImageListJson
functional_test.go:261: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image ls --format json --alsologtostderr
functional_test.go:266: (dbg) Stdout: out/minikube-linux-arm64 -p functional-911502 image ls --format json --alsologtostderr:
[{"id":"sha256:201a68b2b4ee65b55299ddee7fcea31c6a61f4b34671905454bcd252d08ee9d5","repoDigests":[],"repoTags":["docker.io/library/minikube-local-cache-test:functional-911502"],"size":"991"},{"id":"sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8","repoDigests":["registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"],"repoTags":["registry.k8s.io/pause:3.10"],"size":"267933"},{"id":"sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da","repoDigests":["registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a"],"repoTags":["registry.k8s.io/etcd:3.5.15-0"],"size":"66535646"},{"id":"sha256:8057e0500773a37cde2cff041eb13ebd68c748419a2fbfd1dfb5bf38696cc8e5","repoDigests":[],"repoTags":["registry.k8s.io/pause:3.1"],"size":"262191"},{"id":"sha256:3d18732f8686cc3c878055d99a05fa80289502fa496b36b6a0fe0f77206a7300","repoDigests":[],"repoTags":["registry.k8s.io/pause:3.3"],"size":"249461"},{"id":"sha2
56:20b332c9a70d8516d849d1ac23eff5800cbb2f263d379f0ec11ee908db6b25a8","repoDigests":["docker.io/kubernetesui/dashboard@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93"],"repoTags":[],"size":"74084559"},{"id":"sha256:ce2d2cda2d858fdaea84129deb86d18e5dbf1c548f230b79fdca74cc91729d17","repoDigests":[],"repoTags":["docker.io/kicbase/echo-server:functional-911502"],"size":"2173567"},{"id":"sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4","repoDigests":["registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"],"repoTags":["registry.k8s.io/coredns/coredns:v1.11.3"],"size":"16948420"},{"id":"sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d","repoDigests":["registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44"],"repoTags":["registry.k8s.io/kube-proxy:v1.31.1"],"size":"26756812"},{"id":"sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51"
,"repoDigests":["docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166"],"repoTags":["docker.io/kindest/kindnetd:v20240813-c6f155d6"],"size":"33309097"},{"id":"sha256:a422e0e982356f6c1cf0e5bb7b733363caae3992a07c99951fbcc73e58ed656a","repoDigests":["docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c"],"repoTags":[],"size":"18306114"},{"id":"sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853","repoDigests":["registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb"],"repoTags":["registry.k8s.io/kube-apiserver:v1.31.1"],"size":"25687130"},{"id":"sha256:8cb2091f603e75187e2f6226c5901d12e00b1d1f778c6471ae4578e8a1c4724a","repoDigests":[],"repoTags":["registry.k8s.io/pause:latest"],"size":"71300"},{"id":"sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6","repoDigests":["gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d141
8e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"],"repoTags":["gcr.io/k8s-minikube/storage-provisioner:v5"],"size":"8034419"},{"id":"sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e","repoDigests":["registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1"],"repoTags":["registry.k8s.io/kube-controller-manager:v1.31.1"],"size":"23948670"},{"id":"sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d","repoDigests":["registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0"],"repoTags":["registry.k8s.io/kube-scheduler:v1.31.1"],"size":"18507674"}]
functional_test.go:269: (dbg) Stderr: out/minikube-linux-arm64 -p functional-911502 image ls --format json --alsologtostderr:
I0916 10:51:15.289511 2100190 out.go:345] Setting OutFile to fd 1 ...
I0916 10:51:15.289733 2100190 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:15.289760 2100190 out.go:358] Setting ErrFile to fd 2...
I0916 10:51:15.289780 2100190 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:15.290049 2100190 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
I0916 10:51:15.290824 2100190 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
I0916 10:51:15.291008 2100190 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
I0916 10:51:15.291570 2100190 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
I0916 10:51:15.328119 2100190 ssh_runner.go:195] Run: systemctl --version
I0916 10:51:15.328187 2100190 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
I0916 10:51:15.346671 2100190 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
I0916 10:51:15.439570 2100190 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListJson (0.30s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageListYaml (0.27s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageListYaml
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListYaml

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ImageCommands/ImageListYaml
functional_test.go:261: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image ls --format yaml --alsologtostderr
functional_test.go:266: (dbg) Stdout: out/minikube-linux-arm64 -p functional-911502 image ls --format yaml --alsologtostderr:
- id: sha256:8cb2091f603e75187e2f6226c5901d12e00b1d1f778c6471ae4578e8a1c4724a
repoDigests: []
repoTags:
- registry.k8s.io/pause:latest
size: "71300"
- id: sha256:6a23fa8fd2b78ab58e42ba273808edc936a9c53d8ac4a919f6337be094843a51
repoDigests:
- docker.io/kindest/kindnetd@sha256:e59a687ca28ae274a2fc92f1e2f5f1c739f353178a43a23aafc71adb802ed166
repoTags:
- docker.io/kindest/kindnetd:v20240813-c6f155d6
size: "33309097"
- id: sha256:20b332c9a70d8516d849d1ac23eff5800cbb2f263d379f0ec11ee908db6b25a8
repoDigests:
- docker.io/kubernetesui/dashboard@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93
repoTags: []
size: "74084559"
- id: sha256:ce2d2cda2d858fdaea84129deb86d18e5dbf1c548f230b79fdca74cc91729d17
repoDigests: []
repoTags:
- docker.io/kicbase/echo-server:functional-911502
size: "2173567"
- id: sha256:7f8aa378bb47dffcf430f3a601abe39137e88aee0238e23ed8530fdd18dab82d
repoDigests:
- registry.k8s.io/kube-scheduler@sha256:969a7e96340f3a927b3d652582edec2d6d82a083871d81ef5064b7edaab430d0
repoTags:
- registry.k8s.io/kube-scheduler:v1.31.1
size: "18507674"
- id: sha256:8057e0500773a37cde2cff041eb13ebd68c748419a2fbfd1dfb5bf38696cc8e5
repoDigests: []
repoTags:
- registry.k8s.io/pause:3.1
size: "262191"
- id: sha256:201a68b2b4ee65b55299ddee7fcea31c6a61f4b34671905454bcd252d08ee9d5
repoDigests: []
repoTags:
- docker.io/library/minikube-local-cache-test:functional-911502
size: "991"
- id: sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6
repoDigests:
- gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944
repoTags:
- gcr.io/k8s-minikube/storage-provisioner:v5
size: "8034419"
- id: sha256:27e3830e1402783674d8b594038967deea9d51f0d91b34c93c8f39d2f68af7da
repoDigests:
- registry.k8s.io/etcd@sha256:a6dc63e6e8cfa0307d7851762fa6b629afb18f28d8aa3fab5a6e91b4af60026a
repoTags:
- registry.k8s.io/etcd:3.5.15-0
size: "66535646"
- id: sha256:24a140c548c075e487e45d0ee73b1aa89f8bfb40c08a57e05975559728822b1d
repoDigests:
- registry.k8s.io/kube-proxy@sha256:4ee50b00484d7f39a90fc4cda92251177ef5ad8fdf2f2a0c768f9e634b4c6d44
repoTags:
- registry.k8s.io/kube-proxy:v1.31.1
size: "26756812"
- id: sha256:3d18732f8686cc3c878055d99a05fa80289502fa496b36b6a0fe0f77206a7300
repoDigests: []
repoTags:
- registry.k8s.io/pause:3.3
size: "249461"
- id: sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4
repoDigests:
- registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e
repoTags:
- registry.k8s.io/coredns/coredns:v1.11.3
size: "16948420"
- id: sha256:d3f53a98c0a9d9163c4848bcf34b2d2f5e1e3691b79f3d1dd6d0206809e02853
repoDigests:
- registry.k8s.io/kube-apiserver@sha256:2409c23dbb5a2b7a81adbb184d3eac43ac653e9b97a7c0ee121b89bb3ef61fdb
repoTags:
- registry.k8s.io/kube-apiserver:v1.31.1
size: "25687130"
- id: sha256:279f381cb37365bbbcd133c9531fba9c2beb0f38dbbe6ddfcd0b1b1643d3450e
repoDigests:
- registry.k8s.io/kube-controller-manager@sha256:9f9da5b27e03f89599cc40ba89150aebf3b4cff001e6db6d998674b34181e1a1
repoTags:
- registry.k8s.io/kube-controller-manager:v1.31.1
size: "23948670"
- id: sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8
repoDigests:
- registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a
repoTags:
- registry.k8s.io/pause:3.10
size: "267933"
- id: sha256:a422e0e982356f6c1cf0e5bb7b733363caae3992a07c99951fbcc73e58ed656a
repoDigests:
- docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c
repoTags: []
size: "18306114"

                                                
                                                
functional_test.go:269: (dbg) Stderr: out/minikube-linux-arm64 -p functional-911502 image ls --format yaml --alsologtostderr:
I0916 10:51:14.822976 2100114 out.go:345] Setting OutFile to fd 1 ...
I0916 10:51:14.823184 2100114 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:14.823209 2100114 out.go:358] Setting ErrFile to fd 2...
I0916 10:51:14.823230 2100114 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:14.823539 2100114 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
I0916 10:51:14.824247 2100114 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
I0916 10:51:14.824562 2100114 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
I0916 10:51:14.825112 2100114 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
I0916 10:51:14.845385 2100114 ssh_runner.go:195] Run: systemctl --version
I0916 10:51:14.845461 2100114 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
I0916 10:51:14.864611 2100114 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
I0916 10:51:14.964411 2100114 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListYaml (0.27s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageBuild (3.65s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageBuild
=== PAUSE TestFunctional/parallel/ImageCommands/ImageBuild

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ImageCommands/ImageBuild
functional_test.go:308: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 ssh pgrep buildkitd
functional_test.go:308: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-911502 ssh pgrep buildkitd: exit status 1 (369.838456ms)

                                                
                                                
** stderr ** 
	ssh: Process exited with status 1

                                                
                                                
** /stderr **
functional_test.go:315: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image build -t localhost/my-image:functional-911502 testdata/build --alsologtostderr
functional_test.go:315: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 image build -t localhost/my-image:functional-911502 testdata/build --alsologtostderr: (3.051096595s)
functional_test.go:323: (dbg) Stderr: out/minikube-linux-arm64 -p functional-911502 image build -t localhost/my-image:functional-911502 testdata/build --alsologtostderr:
I0916 10:51:15.472111 2100241 out.go:345] Setting OutFile to fd 1 ...
I0916 10:51:15.473328 2100241 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:15.473337 2100241 out.go:358] Setting ErrFile to fd 2...
I0916 10:51:15.473343 2100241 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0916 10:51:15.473600 2100241 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
I0916 10:51:15.474334 2100241 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
I0916 10:51:15.476246 2100241 config.go:182] Loaded profile config "functional-911502": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
I0916 10:51:15.476769 2100241 cli_runner.go:164] Run: docker container inspect functional-911502 --format={{.State.Status}}
I0916 10:51:15.498261 2100241 ssh_runner.go:195] Run: systemctl --version
I0916 10:51:15.498319 2100241 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-911502
I0916 10:51:15.526481 2100241 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40592 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/functional-911502/id_rsa Username:docker}
I0916 10:51:15.624205 2100241 build_images.go:161] Building image from path: /tmp/build.3816184753.tar
I0916 10:51:15.624302 2100241 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/build
I0916 10:51:15.640803 2100241 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/build/build.3816184753.tar
I0916 10:51:15.651104 2100241 ssh_runner.go:352] existence check for /var/lib/minikube/build/build.3816184753.tar: stat -c "%s %y" /var/lib/minikube/build/build.3816184753.tar: Process exited with status 1
stdout:

                                                
                                                
stderr:
stat: cannot statx '/var/lib/minikube/build/build.3816184753.tar': No such file or directory
I0916 10:51:15.651154 2100241 ssh_runner.go:362] scp /tmp/build.3816184753.tar --> /var/lib/minikube/build/build.3816184753.tar (3072 bytes)
I0916 10:51:15.679894 2100241 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/build/build.3816184753
I0916 10:51:15.689290 2100241 ssh_runner.go:195] Run: sudo tar -C /var/lib/minikube/build/build.3816184753 -xf /var/lib/minikube/build/build.3816184753.tar
I0916 10:51:15.699419 2100241 containerd.go:394] Building image: /var/lib/minikube/build/build.3816184753
I0916 10:51:15.699498 2100241 ssh_runner.go:195] Run: sudo buildctl build --frontend dockerfile.v0 --local context=/var/lib/minikube/build/build.3816184753 --local dockerfile=/var/lib/minikube/build/build.3816184753 --output type=image,name=localhost/my-image:functional-911502
#1 [internal] load build definition from Dockerfile
#1 transferring dockerfile: 97B done
#1 DONE 0.0s

                                                
                                                
#2 [internal] load metadata for gcr.io/k8s-minikube/busybox:latest
#2 DONE 1.2s

                                                
                                                
#3 [internal] load .dockerignore
#3 transferring context: 2B done
#3 DONE 0.0s

                                                
                                                
#4 [internal] load build context
#4 transferring context: 62B done
#4 DONE 0.0s

                                                
                                                
#5 [1/3] FROM gcr.io/k8s-minikube/busybox:latest@sha256:ca5ae90100d50772da31f3b5016209e25ad61972404e2ccd83d44f10dee7e79b
#5 resolve gcr.io/k8s-minikube/busybox:latest@sha256:ca5ae90100d50772da31f3b5016209e25ad61972404e2ccd83d44f10dee7e79b 0.0s done
#5 DONE 0.1s

                                                
                                                
#5 [1/3] FROM gcr.io/k8s-minikube/busybox:latest@sha256:ca5ae90100d50772da31f3b5016209e25ad61972404e2ccd83d44f10dee7e79b
#5 sha256:a01966dde7f8d5ba10b6d87e776c7c8fb5a5f6bfa678874bd28b33b1fc6dba34 0B / 828.50kB 0.2s
#5 sha256:a01966dde7f8d5ba10b6d87e776c7c8fb5a5f6bfa678874bd28b33b1fc6dba34 828.50kB / 828.50kB 0.4s done
#5 extracting sha256:a01966dde7f8d5ba10b6d87e776c7c8fb5a5f6bfa678874bd28b33b1fc6dba34 0.1s done
#5 DONE 0.5s

                                                
                                                
#6 [2/3] RUN true
#6 DONE 0.6s

                                                
                                                
#7 [3/3] ADD content.txt /
#7 DONE 0.0s

                                                
                                                
#8 exporting to image
#8 exporting layers 0.1s done
#8 exporting manifest sha256:ddeaafd087b8cd29a7e076f1c9d2eb43f3f210e77e13212bb6debaa18dea2c03 0.0s done
#8 exporting config sha256:41272cf70d5ab8922a22e6b0e9b35804a4de8711789c8712e000110880968e4f 0.0s done
#8 naming to localhost/my-image:functional-911502 done
#8 DONE 0.1s
I0916 10:51:18.434329 2100241 ssh_runner.go:235] Completed: sudo buildctl build --frontend dockerfile.v0 --local context=/var/lib/minikube/build/build.3816184753 --local dockerfile=/var/lib/minikube/build/build.3816184753 --output type=image,name=localhost/my-image:functional-911502: (2.734800236s)
I0916 10:51:18.434407 2100241 ssh_runner.go:195] Run: sudo rm -rf /var/lib/minikube/build/build.3816184753
I0916 10:51:18.444252 2100241 ssh_runner.go:195] Run: sudo rm -f /var/lib/minikube/build/build.3816184753.tar
I0916 10:51:18.454573 2100241 build_images.go:217] Built localhost/my-image:functional-911502 from /tmp/build.3816184753.tar
I0916 10:51:18.454603 2100241 build_images.go:133] succeeded building to: functional-911502
I0916 10:51:18.454609 2100241 build_images.go:134] failed building to: 
functional_test.go:451: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageBuild (3.65s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/Setup (0.72s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/Setup
functional_test.go:342: (dbg) Run:  docker pull kicbase/echo-server:1.0
functional_test.go:347: (dbg) Run:  docker tag kicbase/echo-server:1.0 kicbase/echo-server:functional-911502
--- PASS: TestFunctional/parallel/ImageCommands/Setup (0.72s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageLoadDaemon (1.9s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageLoadDaemon
functional_test.go:355: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image load --daemon kicbase/echo-server:functional-911502 --alsologtostderr
functional_test.go:355: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 image load --daemon kicbase/echo-server:functional-911502 --alsologtostderr: (1.426351248s)
functional_test.go:451: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageLoadDaemon (1.90s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageReloadDaemon (1.91s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageReloadDaemon
functional_test.go:365: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image load --daemon kicbase/echo-server:functional-911502 --alsologtostderr
functional_test.go:365: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 image load --daemon kicbase/echo-server:functional-911502 --alsologtostderr: (1.534092577s)
functional_test.go:451: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageReloadDaemon (1.91s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon (1.74s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon
functional_test.go:235: (dbg) Run:  docker pull kicbase/echo-server:latest
functional_test.go:240: (dbg) Run:  docker tag kicbase/echo-server:latest kicbase/echo-server:functional-911502
functional_test.go:245: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image load --daemon kicbase/echo-server:functional-911502 --alsologtostderr
functional_test.go:245: (dbg) Done: out/minikube-linux-arm64 -p functional-911502 image load --daemon kicbase/echo-server:functional-911502 --alsologtostderr: (1.173268671s)
functional_test.go:451: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon (1.74s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageSaveToFile (0.49s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageSaveToFile
functional_test.go:380: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image save kicbase/echo-server:functional-911502 /home/jenkins/workspace/Docker_Linux_containerd_arm64/echo-server-save.tar --alsologtostderr
--- PASS: TestFunctional/parallel/ImageCommands/ImageSaveToFile (0.49s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageRemove (0.68s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageRemove
functional_test.go:392: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image rm kicbase/echo-server:functional-911502 --alsologtostderr
functional_test.go:451: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageRemove (0.68s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageLoadFromFile (0.88s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageLoadFromFile
functional_test.go:409: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image load /home/jenkins/workspace/Docker_Linux_containerd_arm64/echo-server-save.tar --alsologtostderr
functional_test.go:451: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageLoadFromFile (0.88s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageSaveDaemon (0.5s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageSaveDaemon
functional_test.go:419: (dbg) Run:  docker rmi kicbase/echo-server:functional-911502
functional_test.go:424: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 image save --daemon kicbase/echo-server:functional-911502 --alsologtostderr
functional_test.go:432: (dbg) Run:  docker image inspect kicbase/echo-server:functional-911502
--- PASS: TestFunctional/parallel/ImageCommands/ImageSaveDaemon (0.50s)

                                                
                                    
x
+
TestFunctional/parallel/UpdateContextCmd/no_changes (0.23s)

                                                
                                                
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_changes
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_changes

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_changes
functional_test.go:2119: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_changes (0.23s)

                                                
                                    
x
+
TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster (0.16s)

                                                
                                                
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster
functional_test.go:2119: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster (0.16s)

                                                
                                    
x
+
TestFunctional/parallel/UpdateContextCmd/no_clusters (0.18s)

                                                
                                                
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_clusters
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_clusters

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_clusters
functional_test.go:2119: (dbg) Run:  out/minikube-linux-arm64 -p functional-911502 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_clusters (0.18s)

                                                
                                    
x
+
TestFunctional/delete_echo-server_images (0.04s)

                                                
                                                
=== RUN   TestFunctional/delete_echo-server_images
functional_test.go:190: (dbg) Run:  docker rmi -f kicbase/echo-server:1.0
functional_test.go:190: (dbg) Run:  docker rmi -f kicbase/echo-server:functional-911502
--- PASS: TestFunctional/delete_echo-server_images (0.04s)

                                                
                                    
x
+
TestFunctional/delete_my-image_image (0.02s)

                                                
                                                
=== RUN   TestFunctional/delete_my-image_image
functional_test.go:198: (dbg) Run:  docker rmi -f localhost/my-image:functional-911502
--- PASS: TestFunctional/delete_my-image_image (0.02s)

                                                
                                    
x
+
TestFunctional/delete_minikube_cached_images (0.02s)

                                                
                                                
=== RUN   TestFunctional/delete_minikube_cached_images
functional_test.go:206: (dbg) Run:  docker rmi -f minikube-local-cache-test:functional-911502
--- PASS: TestFunctional/delete_minikube_cached_images (0.02s)

                                                
                                    
x
+
TestMultiControlPlane/serial/StartCluster (118.73s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/StartCluster
ha_test.go:101: (dbg) Run:  out/minikube-linux-arm64 start -p ha-234759 --wait=true --memory=2200 --ha -v=7 --alsologtostderr --driver=docker  --container-runtime=containerd
ha_test.go:101: (dbg) Done: out/minikube-linux-arm64 start -p ha-234759 --wait=true --memory=2200 --ha -v=7 --alsologtostderr --driver=docker  --container-runtime=containerd: (1m57.929130315s)
ha_test.go:107: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 status -v=7 --alsologtostderr
--- PASS: TestMultiControlPlane/serial/StartCluster (118.73s)

                                                
                                    
x
+
TestMultiControlPlane/serial/DeployApp (33.31s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DeployApp
ha_test.go:128: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- apply -f ./testdata/ha/ha-pod-dns-test.yaml
ha_test.go:133: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- rollout status deployment/busybox
E0916 10:53:33.956777 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:133: (dbg) Done: out/minikube-linux-arm64 kubectl -p ha-234759 -- rollout status deployment/busybox: (30.226812578s)
ha_test.go:140: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- get pods -o jsonpath='{.items[*].status.podIP}'
ha_test.go:163: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- get pods -o jsonpath='{.items[*].metadata.name}'
ha_test.go:171: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-7l4g7 -- nslookup kubernetes.io
ha_test.go:171: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-kjr9x -- nslookup kubernetes.io
ha_test.go:171: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-m9lsb -- nslookup kubernetes.io
ha_test.go:181: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-7l4g7 -- nslookup kubernetes.default
ha_test.go:181: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-kjr9x -- nslookup kubernetes.default
ha_test.go:181: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-m9lsb -- nslookup kubernetes.default
ha_test.go:189: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-7l4g7 -- nslookup kubernetes.default.svc.cluster.local
ha_test.go:189: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-kjr9x -- nslookup kubernetes.default.svc.cluster.local
ha_test.go:189: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-m9lsb -- nslookup kubernetes.default.svc.cluster.local
--- PASS: TestMultiControlPlane/serial/DeployApp (33.31s)

                                                
                                    
x
+
TestMultiControlPlane/serial/PingHostFromPods (1.65s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/PingHostFromPods
ha_test.go:199: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- get pods -o jsonpath='{.items[*].metadata.name}'
ha_test.go:207: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-7l4g7 -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-7l4g7 -- sh -c "ping -c 1 192.168.49.1"
ha_test.go:207: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-kjr9x -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-kjr9x -- sh -c "ping -c 1 192.168.49.1"
ha_test.go:207: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-m9lsb -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-234759 -- exec busybox-7dff88458-m9lsb -- sh -c "ping -c 1 192.168.49.1"
--- PASS: TestMultiControlPlane/serial/PingHostFromPods (1.65s)

                                                
                                    
x
+
TestMultiControlPlane/serial/AddWorkerNode (25.74s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/AddWorkerNode
ha_test.go:228: (dbg) Run:  out/minikube-linux-arm64 node add -p ha-234759 -v=7 --alsologtostderr
E0916 10:54:01.665606 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:07.672560 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:07.679322 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:07.690790 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:07.712110 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:07.753519 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:07.835024 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:07.997078 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:08.318935 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:08.960214 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:10.241916 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:12.804309 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
E0916 10:54:17.926194 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:228: (dbg) Done: out/minikube-linux-arm64 node add -p ha-234759 -v=7 --alsologtostderr: (24.750304947s)
ha_test.go:234: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 status -v=7 --alsologtostderr
--- PASS: TestMultiControlPlane/serial/AddWorkerNode (25.74s)

                                                
                                    
x
+
TestMultiControlPlane/serial/HAppyAfterClusterStart (0.78s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/HAppyAfterClusterStart
ha_test.go:281: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/HAppyAfterClusterStart (0.78s)

                                                
                                    
x
+
TestMultiControlPlane/serial/CopyFile (19.57s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/CopyFile
ha_test.go:326: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 status --output json -v=7 --alsologtostderr
ha_test.go:326: (dbg) Done: out/minikube-linux-arm64 -p ha-234759 status --output json -v=7 --alsologtostderr: (1.059383781s)
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp testdata/cp-test.txt ha-234759:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile3470256434/001/cp-test_ha-234759.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759:/home/docker/cp-test.txt ha-234759-m02:/home/docker/cp-test_ha-234759_ha-234759-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m02 "sudo cat /home/docker/cp-test_ha-234759_ha-234759-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759:/home/docker/cp-test.txt ha-234759-m03:/home/docker/cp-test_ha-234759_ha-234759-m03.txt
E0916 10:54:28.167839 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m03 "sudo cat /home/docker/cp-test_ha-234759_ha-234759-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759:/home/docker/cp-test.txt ha-234759-m04:/home/docker/cp-test_ha-234759_ha-234759-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m04 "sudo cat /home/docker/cp-test_ha-234759_ha-234759-m04.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp testdata/cp-test.txt ha-234759-m02:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759-m02:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile3470256434/001/cp-test_ha-234759-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759-m02:/home/docker/cp-test.txt ha-234759:/home/docker/cp-test_ha-234759-m02_ha-234759.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759 "sudo cat /home/docker/cp-test_ha-234759-m02_ha-234759.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759-m02:/home/docker/cp-test.txt ha-234759-m03:/home/docker/cp-test_ha-234759-m02_ha-234759-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m03 "sudo cat /home/docker/cp-test_ha-234759-m02_ha-234759-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759-m02:/home/docker/cp-test.txt ha-234759-m04:/home/docker/cp-test_ha-234759-m02_ha-234759-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m04 "sudo cat /home/docker/cp-test_ha-234759-m02_ha-234759-m04.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp testdata/cp-test.txt ha-234759-m03:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759-m03:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile3470256434/001/cp-test_ha-234759-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759-m03:/home/docker/cp-test.txt ha-234759:/home/docker/cp-test_ha-234759-m03_ha-234759.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759 "sudo cat /home/docker/cp-test_ha-234759-m03_ha-234759.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759-m03:/home/docker/cp-test.txt ha-234759-m02:/home/docker/cp-test_ha-234759-m03_ha-234759-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m02 "sudo cat /home/docker/cp-test_ha-234759-m03_ha-234759-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759-m03:/home/docker/cp-test.txt ha-234759-m04:/home/docker/cp-test_ha-234759-m03_ha-234759-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m04 "sudo cat /home/docker/cp-test_ha-234759-m03_ha-234759-m04.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp testdata/cp-test.txt ha-234759-m04:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile3470256434/001/cp-test_ha-234759-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt ha-234759:/home/docker/cp-test_ha-234759-m04_ha-234759.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759 "sudo cat /home/docker/cp-test_ha-234759-m04_ha-234759.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt ha-234759-m02:/home/docker/cp-test_ha-234759-m04_ha-234759-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m02 "sudo cat /home/docker/cp-test_ha-234759-m04_ha-234759-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 cp ha-234759-m04:/home/docker/cp-test.txt ha-234759-m03:/home/docker/cp-test_ha-234759-m04_ha-234759-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 ssh -n ha-234759-m03 "sudo cat /home/docker/cp-test_ha-234759-m04_ha-234759-m03.txt"
--- PASS: TestMultiControlPlane/serial/CopyFile (19.57s)

                                                
                                    
x
+
TestMultiControlPlane/serial/StopSecondaryNode (12.93s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/StopSecondaryNode
ha_test.go:363: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 node stop m02 -v=7 --alsologtostderr
E0916 10:54:48.650069 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:363: (dbg) Done: out/minikube-linux-arm64 -p ha-234759 node stop m02 -v=7 --alsologtostderr: (12.180103984s)
ha_test.go:369: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 status -v=7 --alsologtostderr
ha_test.go:369: (dbg) Non-zero exit: out/minikube-linux-arm64 -p ha-234759 status -v=7 --alsologtostderr: exit status 7 (754.164259ms)

                                                
                                                
-- stdout --
	ha-234759
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	ha-234759-m02
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	ha-234759-m03
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	ha-234759-m04
	type: Worker
	host: Running
	kubelet: Running
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0916 10:54:56.244470 2116689 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:54:56.244684 2116689 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:54:56.244714 2116689 out.go:358] Setting ErrFile to fd 2...
	I0916 10:54:56.244733 2116689 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:54:56.244987 2116689 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:54:56.245204 2116689 out.go:352] Setting JSON to false
	I0916 10:54:56.245268 2116689 mustload.go:65] Loading cluster: ha-234759
	I0916 10:54:56.245354 2116689 notify.go:220] Checking for updates...
	I0916 10:54:56.245780 2116689 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:54:56.245817 2116689 status.go:255] checking status of ha-234759 ...
	I0916 10:54:56.246391 2116689 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:54:56.266417 2116689 status.go:330] ha-234759 host status = "Running" (err=<nil>)
	I0916 10:54:56.266440 2116689 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:54:56.266837 2116689 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759
	I0916 10:54:56.291253 2116689 host.go:66] Checking if "ha-234759" exists ...
	I0916 10:54:56.291577 2116689 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:54:56.291641 2116689 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759
	I0916 10:54:56.311563 2116689 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40597 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759/id_rsa Username:docker}
	I0916 10:54:56.412050 2116689 ssh_runner.go:195] Run: systemctl --version
	I0916 10:54:56.416634 2116689 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:54:56.430901 2116689 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 10:54:56.493938 2116689 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:6 ContainersRunning:5 ContainersPaused:0 ContainersStopped:1 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:71 OomKillDisable:true NGoroutines:91 SystemTime:2024-09-16 10:54:56.482767604 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 10:54:56.494643 2116689 kubeconfig.go:125] found "ha-234759" server: "https://192.168.49.254:8443"
	I0916 10:54:56.494772 2116689 api_server.go:166] Checking apiserver status ...
	I0916 10:54:56.494842 2116689 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:54:56.506818 2116689 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/1388/cgroup
	I0916 10:54:56.516589 2116689 api_server.go:182] apiserver freezer: "13:freezer:/docker/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/kubepods/burstable/pod13247b24f7802e7f701dc896ad904393/fd48034050bae874c9e190debc0b1bfa138cdf53fc5887bd15f027e7346ca82d"
	I0916 10:54:56.516688 2116689 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/6306ac5a5985a7ba1fa2251f8ee285d26b8dd42af49e1101f624e77c0e079f59/kubepods/burstable/pod13247b24f7802e7f701dc896ad904393/fd48034050bae874c9e190debc0b1bfa138cdf53fc5887bd15f027e7346ca82d/freezer.state
	I0916 10:54:56.525557 2116689 api_server.go:204] freezer state: "THAWED"
	I0916 10:54:56.525588 2116689 api_server.go:253] Checking apiserver healthz at https://192.168.49.254:8443/healthz ...
	I0916 10:54:56.533593 2116689 api_server.go:279] https://192.168.49.254:8443/healthz returned 200:
	ok
	I0916 10:54:56.533624 2116689 status.go:422] ha-234759 apiserver status = Running (err=<nil>)
	I0916 10:54:56.533635 2116689 status.go:257] ha-234759 status: &{Name:ha-234759 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 10:54:56.533653 2116689 status.go:255] checking status of ha-234759-m02 ...
	I0916 10:54:56.533995 2116689 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Status}}
	I0916 10:54:56.553905 2116689 status.go:330] ha-234759-m02 host status = "Stopped" (err=<nil>)
	I0916 10:54:56.553945 2116689 status.go:343] host is not running, skipping remaining checks
	I0916 10:54:56.553955 2116689 status.go:257] ha-234759-m02 status: &{Name:ha-234759-m02 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 10:54:56.553976 2116689 status.go:255] checking status of ha-234759-m03 ...
	I0916 10:54:56.554586 2116689 cli_runner.go:164] Run: docker container inspect ha-234759-m03 --format={{.State.Status}}
	I0916 10:54:56.574303 2116689 status.go:330] ha-234759-m03 host status = "Running" (err=<nil>)
	I0916 10:54:56.574332 2116689 host.go:66] Checking if "ha-234759-m03" exists ...
	I0916 10:54:56.574951 2116689 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m03
	I0916 10:54:56.599505 2116689 host.go:66] Checking if "ha-234759-m03" exists ...
	I0916 10:54:56.599877 2116689 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:54:56.599924 2116689 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m03
	I0916 10:54:56.620695 2116689 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40607 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m03/id_rsa Username:docker}
	I0916 10:54:56.715939 2116689 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:54:56.730857 2116689 kubeconfig.go:125] found "ha-234759" server: "https://192.168.49.254:8443"
	I0916 10:54:56.730888 2116689 api_server.go:166] Checking apiserver status ...
	I0916 10:54:56.730934 2116689 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 10:54:56.742482 2116689 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/1399/cgroup
	I0916 10:54:56.754045 2116689 api_server.go:182] apiserver freezer: "13:freezer:/docker/5168f6623ac01a5854a982b84db019755b131a7e6d58baa7043071e5defb5fb3/kubepods/burstable/pod255fe4cb897c7454b3ab3d95a22d092c/f1cbe1babda1e21e723694178929d5f501de98bf153a11c6e01e0a5339526fce"
	I0916 10:54:56.754125 2116689 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/5168f6623ac01a5854a982b84db019755b131a7e6d58baa7043071e5defb5fb3/kubepods/burstable/pod255fe4cb897c7454b3ab3d95a22d092c/f1cbe1babda1e21e723694178929d5f501de98bf153a11c6e01e0a5339526fce/freezer.state
	I0916 10:54:56.764367 2116689 api_server.go:204] freezer state: "THAWED"
	I0916 10:54:56.764455 2116689 api_server.go:253] Checking apiserver healthz at https://192.168.49.254:8443/healthz ...
	I0916 10:54:56.772219 2116689 api_server.go:279] https://192.168.49.254:8443/healthz returned 200:
	ok
	I0916 10:54:56.772246 2116689 status.go:422] ha-234759-m03 apiserver status = Running (err=<nil>)
	I0916 10:54:56.772256 2116689 status.go:257] ha-234759-m03 status: &{Name:ha-234759-m03 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 10:54:56.772285 2116689 status.go:255] checking status of ha-234759-m04 ...
	I0916 10:54:56.772610 2116689 cli_runner.go:164] Run: docker container inspect ha-234759-m04 --format={{.State.Status}}
	I0916 10:54:56.790118 2116689 status.go:330] ha-234759-m04 host status = "Running" (err=<nil>)
	I0916 10:54:56.790143 2116689 host.go:66] Checking if "ha-234759-m04" exists ...
	I0916 10:54:56.790458 2116689 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-234759-m04
	I0916 10:54:56.811166 2116689 host.go:66] Checking if "ha-234759-m04" exists ...
	I0916 10:54:56.811509 2116689 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 10:54:56.811554 2116689 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-234759-m04
	I0916 10:54:56.838395 2116689 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40612 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/ha-234759-m04/id_rsa Username:docker}
	I0916 10:54:56.935749 2116689 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 10:54:56.948853 2116689 status.go:257] ha-234759-m04 status: &{Name:ha-234759-m04 Host:Running Kubelet:Running APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}

                                                
                                                
** /stderr **
--- PASS: TestMultiControlPlane/serial/StopSecondaryNode (12.93s)

                                                
                                    
x
+
TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop (0.6s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop
ha_test.go:390: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop (0.60s)

                                                
                                    
x
+
TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart (0.78s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart
ha_test.go:281: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart (0.78s)

                                                
                                    
x
+
TestMultiControlPlane/serial/RestartClusterKeepsNodes (141.39s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/RestartClusterKeepsNodes
ha_test.go:456: (dbg) Run:  out/minikube-linux-arm64 node list -p ha-234759 -v=7 --alsologtostderr
ha_test.go:462: (dbg) Run:  out/minikube-linux-arm64 stop -p ha-234759 -v=7 --alsologtostderr
E0916 10:55:29.611716 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:462: (dbg) Done: out/minikube-linux-arm64 stop -p ha-234759 -v=7 --alsologtostderr: (37.564491365s)
ha_test.go:467: (dbg) Run:  out/minikube-linux-arm64 start -p ha-234759 --wait=true -v=7 --alsologtostderr
E0916 10:56:51.533045 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:467: (dbg) Done: out/minikube-linux-arm64 start -p ha-234759 --wait=true -v=7 --alsologtostderr: (1m43.635530256s)
ha_test.go:472: (dbg) Run:  out/minikube-linux-arm64 node list -p ha-234759
--- PASS: TestMultiControlPlane/serial/RestartClusterKeepsNodes (141.39s)

                                                
                                    
x
+
TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete (0.57s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete
ha_test.go:390: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete (0.57s)

                                                
                                    
x
+
TestMultiControlPlane/serial/StopCluster (36.07s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/StopCluster
ha_test.go:531: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 stop -v=7 --alsologtostderr
ha_test.go:531: (dbg) Done: out/minikube-linux-arm64 -p ha-234759 stop -v=7 --alsologtostderr: (35.962517399s)
ha_test.go:537: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 status -v=7 --alsologtostderr
ha_test.go:537: (dbg) Non-zero exit: out/minikube-linux-arm64 -p ha-234759 status -v=7 --alsologtostderr: exit status 7 (107.113467ms)

                                                
                                                
-- stdout --
	ha-234759
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	ha-234759-m02
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	ha-234759-m04
	type: Worker
	host: Stopped
	kubelet: Stopped
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0916 10:58:30.987945 2131614 out.go:345] Setting OutFile to fd 1 ...
	I0916 10:58:30.988096 2131614 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:58:30.988105 2131614 out.go:358] Setting ErrFile to fd 2...
	I0916 10:58:30.988111 2131614 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 10:58:30.988360 2131614 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 10:58:30.988552 2131614 out.go:352] Setting JSON to false
	I0916 10:58:30.988586 2131614 mustload.go:65] Loading cluster: ha-234759
	I0916 10:58:30.988688 2131614 notify.go:220] Checking for updates...
	I0916 10:58:30.989021 2131614 config.go:182] Loaded profile config "ha-234759": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 10:58:30.989039 2131614 status.go:255] checking status of ha-234759 ...
	I0916 10:58:30.989877 2131614 cli_runner.go:164] Run: docker container inspect ha-234759 --format={{.State.Status}}
	I0916 10:58:31.010861 2131614 status.go:330] ha-234759 host status = "Stopped" (err=<nil>)
	I0916 10:58:31.010884 2131614 status.go:343] host is not running, skipping remaining checks
	I0916 10:58:31.010898 2131614 status.go:257] ha-234759 status: &{Name:ha-234759 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 10:58:31.010936 2131614 status.go:255] checking status of ha-234759-m02 ...
	I0916 10:58:31.011266 2131614 cli_runner.go:164] Run: docker container inspect ha-234759-m02 --format={{.State.Status}}
	I0916 10:58:31.033174 2131614 status.go:330] ha-234759-m02 host status = "Stopped" (err=<nil>)
	I0916 10:58:31.033194 2131614 status.go:343] host is not running, skipping remaining checks
	I0916 10:58:31.033202 2131614 status.go:257] ha-234759-m02 status: &{Name:ha-234759-m02 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 10:58:31.033225 2131614 status.go:255] checking status of ha-234759-m04 ...
	I0916 10:58:31.033529 2131614 cli_runner.go:164] Run: docker container inspect ha-234759-m04 --format={{.State.Status}}
	I0916 10:58:31.049904 2131614 status.go:330] ha-234759-m04 host status = "Stopped" (err=<nil>)
	I0916 10:58:31.049925 2131614 status.go:343] host is not running, skipping remaining checks
	I0916 10:58:31.049932 2131614 status.go:257] ha-234759-m04 status: &{Name:ha-234759-m04 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}

                                                
                                                
** /stderr **
--- PASS: TestMultiControlPlane/serial/StopCluster (36.07s)

                                                
                                    
x
+
TestMultiControlPlane/serial/DegradedAfterClusterRestart (0.54s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DegradedAfterClusterRestart
ha_test.go:390: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/DegradedAfterClusterRestart (0.54s)

                                                
                                    
x
+
TestMultiControlPlane/serial/AddSecondaryNode (44.85s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/AddSecondaryNode
ha_test.go:605: (dbg) Run:  out/minikube-linux-arm64 node add -p ha-234759 --control-plane -v=7 --alsologtostderr
ha_test.go:605: (dbg) Done: out/minikube-linux-arm64 node add -p ha-234759 --control-plane -v=7 --alsologtostderr: (43.777721696s)
ha_test.go:611: (dbg) Run:  out/minikube-linux-arm64 -p ha-234759 status -v=7 --alsologtostderr
ha_test.go:611: (dbg) Done: out/minikube-linux-arm64 -p ha-234759 status -v=7 --alsologtostderr: (1.069614022s)
--- PASS: TestMultiControlPlane/serial/AddSecondaryNode (44.85s)

                                                
                                    
x
+
TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd (0.79s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd
ha_test.go:281: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd (0.79s)

                                                
                                    
x
+
TestJSONOutput/start/Command (89.03s)

                                                
                                                
=== RUN   TestJSONOutput/start/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 start -p json-output-478257 --output=json --user=testUser --memory=2200 --wait=true --driver=docker  --container-runtime=containerd
json_output_test.go:63: (dbg) Done: out/minikube-linux-arm64 start -p json-output-478257 --output=json --user=testUser --memory=2200 --wait=true --driver=docker  --container-runtime=containerd: (1m29.017942339s)
--- PASS: TestJSONOutput/start/Command (89.03s)

                                                
                                    
x
+
TestJSONOutput/start/Audit (0s)

                                                
                                                
=== RUN   TestJSONOutput/start/Audit
--- PASS: TestJSONOutput/start/Audit (0.00s)

                                                
                                    
x
+
TestJSONOutput/start/parallel/DistinctCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/start/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/start/parallel/DistinctCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/start/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/start/parallel/DistinctCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/start/parallel/IncreasingCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/start/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/start/parallel/IncreasingCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/start/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/start/parallel/IncreasingCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/pause/Command (0.77s)

                                                
                                                
=== RUN   TestJSONOutput/pause/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 pause -p json-output-478257 --output=json --user=testUser
--- PASS: TestJSONOutput/pause/Command (0.77s)

                                                
                                    
x
+
TestJSONOutput/pause/Audit (0s)

                                                
                                                
=== RUN   TestJSONOutput/pause/Audit
--- PASS: TestJSONOutput/pause/Audit (0.00s)

                                                
                                    
x
+
TestJSONOutput/pause/parallel/DistinctCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/pause/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/pause/parallel/DistinctCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/pause/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/pause/parallel/DistinctCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/pause/parallel/IncreasingCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/pause/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/pause/parallel/IncreasingCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/pause/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/pause/parallel/IncreasingCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/unpause/Command (0.69s)

                                                
                                                
=== RUN   TestJSONOutput/unpause/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 unpause -p json-output-478257 --output=json --user=testUser
--- PASS: TestJSONOutput/unpause/Command (0.69s)

                                                
                                    
x
+
TestJSONOutput/unpause/Audit (0s)

                                                
                                                
=== RUN   TestJSONOutput/unpause/Audit
--- PASS: TestJSONOutput/unpause/Audit (0.00s)

                                                
                                    
x
+
TestJSONOutput/unpause/parallel/DistinctCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/unpause/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/unpause/parallel/DistinctCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/unpause/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/unpause/parallel/DistinctCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/unpause/parallel/IncreasingCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/unpause/parallel/IncreasingCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/unpause/parallel/IncreasingCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/stop/Command (5.86s)

                                                
                                                
=== RUN   TestJSONOutput/stop/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 stop -p json-output-478257 --output=json --user=testUser
json_output_test.go:63: (dbg) Done: out/minikube-linux-arm64 stop -p json-output-478257 --output=json --user=testUser: (5.856274999s)
--- PASS: TestJSONOutput/stop/Command (5.86s)

                                                
                                    
x
+
TestJSONOutput/stop/Audit (0s)

                                                
                                                
=== RUN   TestJSONOutput/stop/Audit
--- PASS: TestJSONOutput/stop/Audit (0.00s)

                                                
                                    
x
+
TestJSONOutput/stop/parallel/DistinctCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/stop/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/stop/parallel/DistinctCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/stop/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/stop/parallel/DistinctCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/stop/parallel/IncreasingCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/stop/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/stop/parallel/IncreasingCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/stop/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/stop/parallel/IncreasingCurrentSteps (0.00s)

                                                
                                    
x
+
TestErrorJSONOutput (0.23s)

                                                
                                                
=== RUN   TestErrorJSONOutput
json_output_test.go:160: (dbg) Run:  out/minikube-linux-arm64 start -p json-output-error-756385 --memory=2200 --output=json --wait=true --driver=fail
json_output_test.go:160: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p json-output-error-756385 --memory=2200 --output=json --wait=true --driver=fail: exit status 56 (82.948137ms)

                                                
                                                
-- stdout --
	{"specversion":"1.0","id":"8704369f-d82f-468c-a3b3-314f193aa295","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"0","message":"[json-output-error-756385] minikube v1.34.0 on Ubuntu 20.04 (arm64)","name":"Initial Minikube Setup","totalsteps":"19"}}
	{"specversion":"1.0","id":"e6569c86-cb74-4c56-a604-15cd21b4f32c","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_LOCATION=19651"}}
	{"specversion":"1.0","id":"addc852a-0b57-45ce-894a-597f7405e6cd","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true"}}
	{"specversion":"1.0","id":"3c0cb976-a3ad-4c09-8617-9f98c9bddea9","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig"}}
	{"specversion":"1.0","id":"c45e3468-3c79-46ca-a3d8-97a631b9d311","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube"}}
	{"specversion":"1.0","id":"316e3130-891d-4f52-a9bd-1ddcf0c81f76","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_BIN=out/minikube-linux-arm64"}}
	{"specversion":"1.0","id":"59a149be-061e-4eb9-bc36-db303497249c","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_FORCE_SYSTEMD="}}
	{"specversion":"1.0","id":"441bc83c-f819-426d-9fe7-ade7798d09ca","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.error","datacontenttype":"application/json","data":{"advice":"","exitcode":"56","issues":"","message":"The driver 'fail' is not supported on linux/arm64","name":"DRV_UNSUPPORTED_OS","url":""}}

                                                
                                                
-- /stdout --
helpers_test.go:175: Cleaning up "json-output-error-756385" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p json-output-error-756385
--- PASS: TestErrorJSONOutput (0.23s)

                                                
                                    
x
+
TestKicCustomNetwork/create_custom_network (39.77s)

                                                
                                                
=== RUN   TestKicCustomNetwork/create_custom_network
kic_custom_network_test.go:57: (dbg) Run:  out/minikube-linux-arm64 start -p docker-network-307367 --network=
kic_custom_network_test.go:57: (dbg) Done: out/minikube-linux-arm64 start -p docker-network-307367 --network=: (37.635458081s)
kic_custom_network_test.go:150: (dbg) Run:  docker network ls --format {{.Name}}
helpers_test.go:175: Cleaning up "docker-network-307367" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p docker-network-307367
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p docker-network-307367: (2.11377477s)
--- PASS: TestKicCustomNetwork/create_custom_network (39.77s)

                                                
                                    
x
+
TestKicCustomNetwork/use_default_bridge_network (35.46s)

                                                
                                                
=== RUN   TestKicCustomNetwork/use_default_bridge_network
kic_custom_network_test.go:57: (dbg) Run:  out/minikube-linux-arm64 start -p docker-network-221494 --network=bridge
E0916 11:03:33.955955 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
kic_custom_network_test.go:57: (dbg) Done: out/minikube-linux-arm64 start -p docker-network-221494 --network=bridge: (33.492798293s)
kic_custom_network_test.go:150: (dbg) Run:  docker network ls --format {{.Name}}
helpers_test.go:175: Cleaning up "docker-network-221494" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p docker-network-221494
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p docker-network-221494: (1.94404579s)
--- PASS: TestKicCustomNetwork/use_default_bridge_network (35.46s)

                                                
                                    
x
+
TestKicExistingNetwork (32.08s)

                                                
                                                
=== RUN   TestKicExistingNetwork
kic_custom_network_test.go:150: (dbg) Run:  docker network ls --format {{.Name}}
kic_custom_network_test.go:93: (dbg) Run:  out/minikube-linux-arm64 start -p existing-network-044425 --network=existing-network
E0916 11:04:07.672596 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
kic_custom_network_test.go:93: (dbg) Done: out/minikube-linux-arm64 start -p existing-network-044425 --network=existing-network: (29.935670021s)
helpers_test.go:175: Cleaning up "existing-network-044425" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p existing-network-044425
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p existing-network-044425: (1.983188905s)
--- PASS: TestKicExistingNetwork (32.08s)

                                                
                                    
x
+
TestKicCustomSubnet (34.44s)

                                                
                                                
=== RUN   TestKicCustomSubnet
kic_custom_network_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p custom-subnet-708719 --subnet=192.168.60.0/24
kic_custom_network_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p custom-subnet-708719 --subnet=192.168.60.0/24: (32.34144767s)
kic_custom_network_test.go:161: (dbg) Run:  docker network inspect custom-subnet-708719 --format "{{(index .IPAM.Config 0).Subnet}}"
helpers_test.go:175: Cleaning up "custom-subnet-708719" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p custom-subnet-708719
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p custom-subnet-708719: (2.068926306s)
--- PASS: TestKicCustomSubnet (34.44s)

                                                
                                    
x
+
TestKicStaticIP (36.19s)

                                                
                                                
=== RUN   TestKicStaticIP
kic_custom_network_test.go:132: (dbg) Run:  out/minikube-linux-arm64 start -p static-ip-963360 --static-ip=192.168.200.200
E0916 11:04:57.027073 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
kic_custom_network_test.go:132: (dbg) Done: out/minikube-linux-arm64 start -p static-ip-963360 --static-ip=192.168.200.200: (33.99146486s)
kic_custom_network_test.go:138: (dbg) Run:  out/minikube-linux-arm64 -p static-ip-963360 ip
helpers_test.go:175: Cleaning up "static-ip-963360" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p static-ip-963360
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p static-ip-963360: (2.043095295s)
--- PASS: TestKicStaticIP (36.19s)

                                                
                                    
x
+
TestMainNoArgs (0.05s)

                                                
                                                
=== RUN   TestMainNoArgs
main_test.go:68: (dbg) Run:  out/minikube-linux-arm64
--- PASS: TestMainNoArgs (0.05s)

                                                
                                    
x
+
TestMinikubeProfile (65.34s)

                                                
                                                
=== RUN   TestMinikubeProfile
minikube_profile_test.go:44: (dbg) Run:  out/minikube-linux-arm64 start -p first-479447 --driver=docker  --container-runtime=containerd
minikube_profile_test.go:44: (dbg) Done: out/minikube-linux-arm64 start -p first-479447 --driver=docker  --container-runtime=containerd: (29.769268563s)
minikube_profile_test.go:44: (dbg) Run:  out/minikube-linux-arm64 start -p second-482401 --driver=docker  --container-runtime=containerd
minikube_profile_test.go:44: (dbg) Done: out/minikube-linux-arm64 start -p second-482401 --driver=docker  --container-runtime=containerd: (30.394273522s)
minikube_profile_test.go:51: (dbg) Run:  out/minikube-linux-arm64 profile first-479447
minikube_profile_test.go:55: (dbg) Run:  out/minikube-linux-arm64 profile list -ojson
minikube_profile_test.go:51: (dbg) Run:  out/minikube-linux-arm64 profile second-482401
minikube_profile_test.go:55: (dbg) Run:  out/minikube-linux-arm64 profile list -ojson
helpers_test.go:175: Cleaning up "second-482401" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p second-482401
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p second-482401: (1.93662937s)
helpers_test.go:175: Cleaning up "first-479447" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p first-479447
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p first-479447: (1.934425682s)
--- PASS: TestMinikubeProfile (65.34s)

                                                
                                    
x
+
TestMountStart/serial/StartWithMountFirst (9.32s)

                                                
                                                
=== RUN   TestMountStart/serial/StartWithMountFirst
mount_start_test.go:98: (dbg) Run:  out/minikube-linux-arm64 start -p mount-start-1-970382 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46464 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=containerd
mount_start_test.go:98: (dbg) Done: out/minikube-linux-arm64 start -p mount-start-1-970382 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46464 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=containerd: (8.315294262s)
--- PASS: TestMountStart/serial/StartWithMountFirst (9.32s)

                                                
                                    
x
+
TestMountStart/serial/VerifyMountFirst (0.25s)

                                                
                                                
=== RUN   TestMountStart/serial/VerifyMountFirst
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-1-970382 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountFirst (0.25s)

                                                
                                    
x
+
TestMountStart/serial/StartWithMountSecond (6.98s)

                                                
                                                
=== RUN   TestMountStart/serial/StartWithMountSecond
mount_start_test.go:98: (dbg) Run:  out/minikube-linux-arm64 start -p mount-start-2-972638 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46465 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=containerd
mount_start_test.go:98: (dbg) Done: out/minikube-linux-arm64 start -p mount-start-2-972638 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46465 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=containerd: (5.978697649s)
--- PASS: TestMountStart/serial/StartWithMountSecond (6.98s)

                                                
                                    
x
+
TestMountStart/serial/VerifyMountSecond (0.26s)

                                                
                                                
=== RUN   TestMountStart/serial/VerifyMountSecond
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-2-972638 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountSecond (0.26s)

                                                
                                    
x
+
TestMountStart/serial/DeleteFirst (1.61s)

                                                
                                                
=== RUN   TestMountStart/serial/DeleteFirst
pause_test.go:132: (dbg) Run:  out/minikube-linux-arm64 delete -p mount-start-1-970382 --alsologtostderr -v=5
pause_test.go:132: (dbg) Done: out/minikube-linux-arm64 delete -p mount-start-1-970382 --alsologtostderr -v=5: (1.613721702s)
--- PASS: TestMountStart/serial/DeleteFirst (1.61s)

                                                
                                    
x
+
TestMountStart/serial/VerifyMountPostDelete (0.26s)

                                                
                                                
=== RUN   TestMountStart/serial/VerifyMountPostDelete
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-2-972638 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountPostDelete (0.26s)

                                                
                                    
x
+
TestMountStart/serial/Stop (1.2s)

                                                
                                                
=== RUN   TestMountStart/serial/Stop
mount_start_test.go:155: (dbg) Run:  out/minikube-linux-arm64 stop -p mount-start-2-972638
mount_start_test.go:155: (dbg) Done: out/minikube-linux-arm64 stop -p mount-start-2-972638: (1.200108881s)
--- PASS: TestMountStart/serial/Stop (1.20s)

                                                
                                    
x
+
TestMountStart/serial/RestartStopped (8.13s)

                                                
                                                
=== RUN   TestMountStart/serial/RestartStopped
mount_start_test.go:166: (dbg) Run:  out/minikube-linux-arm64 start -p mount-start-2-972638
mount_start_test.go:166: (dbg) Done: out/minikube-linux-arm64 start -p mount-start-2-972638: (7.129318859s)
--- PASS: TestMountStart/serial/RestartStopped (8.13s)

                                                
                                    
x
+
TestMountStart/serial/VerifyMountPostStop (0.26s)

                                                
                                                
=== RUN   TestMountStart/serial/VerifyMountPostStop
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-2-972638 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountPostStop (0.26s)

                                                
                                    
x
+
TestMultiNode/serial/FreshStart2Nodes (69.46s)

                                                
                                                
=== RUN   TestMultiNode/serial/FreshStart2Nodes
multinode_test.go:96: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-890146 --wait=true --memory=2200 --nodes=2 -v=8 --alsologtostderr --driver=docker  --container-runtime=containerd
multinode_test.go:96: (dbg) Done: out/minikube-linux-arm64 start -p multinode-890146 --wait=true --memory=2200 --nodes=2 -v=8 --alsologtostderr --driver=docker  --container-runtime=containerd: (1m8.927459343s)
multinode_test.go:102: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 status --alsologtostderr
--- PASS: TestMultiNode/serial/FreshStart2Nodes (69.46s)

                                                
                                    
x
+
TestMultiNode/serial/DeployApp2Nodes (17.01s)

                                                
                                                
=== RUN   TestMultiNode/serial/DeployApp2Nodes
multinode_test.go:493: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- apply -f ./testdata/multinodes/multinode-pod-dns-test.yaml
multinode_test.go:498: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- rollout status deployment/busybox
multinode_test.go:498: (dbg) Done: out/minikube-linux-arm64 kubectl -p multinode-890146 -- rollout status deployment/busybox: (14.981352136s)
multinode_test.go:505: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:528: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- get pods -o jsonpath='{.items[*].metadata.name}'
multinode_test.go:536: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- exec busybox-7dff88458-hf6zl -- nslookup kubernetes.io
multinode_test.go:536: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- exec busybox-7dff88458-wrnfh -- nslookup kubernetes.io
multinode_test.go:546: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- exec busybox-7dff88458-hf6zl -- nslookup kubernetes.default
multinode_test.go:546: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- exec busybox-7dff88458-wrnfh -- nslookup kubernetes.default
multinode_test.go:554: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- exec busybox-7dff88458-hf6zl -- nslookup kubernetes.default.svc.cluster.local
multinode_test.go:554: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- exec busybox-7dff88458-wrnfh -- nslookup kubernetes.default.svc.cluster.local
--- PASS: TestMultiNode/serial/DeployApp2Nodes (17.01s)

                                                
                                    
x
+
TestMultiNode/serial/PingHostFrom2Pods (0.98s)

                                                
                                                
=== RUN   TestMultiNode/serial/PingHostFrom2Pods
multinode_test.go:564: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- get pods -o jsonpath='{.items[*].metadata.name}'
multinode_test.go:572: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- exec busybox-7dff88458-hf6zl -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
multinode_test.go:583: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- exec busybox-7dff88458-hf6zl -- sh -c "ping -c 1 192.168.58.1"
multinode_test.go:572: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- exec busybox-7dff88458-wrnfh -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
multinode_test.go:583: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-890146 -- exec busybox-7dff88458-wrnfh -- sh -c "ping -c 1 192.168.58.1"
--- PASS: TestMultiNode/serial/PingHostFrom2Pods (0.98s)

                                                
                                    
x
+
TestMultiNode/serial/AddNode (18.52s)

                                                
                                                
=== RUN   TestMultiNode/serial/AddNode
multinode_test.go:121: (dbg) Run:  out/minikube-linux-arm64 node add -p multinode-890146 -v 3 --alsologtostderr
E0916 11:08:33.956916 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
multinode_test.go:121: (dbg) Done: out/minikube-linux-arm64 node add -p multinode-890146 -v 3 --alsologtostderr: (17.838686573s)
multinode_test.go:127: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 status --alsologtostderr
--- PASS: TestMultiNode/serial/AddNode (18.52s)

                                                
                                    
x
+
TestMultiNode/serial/ProfileList (0.33s)

                                                
                                                
=== RUN   TestMultiNode/serial/ProfileList
multinode_test.go:143: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiNode/serial/ProfileList (0.33s)

                                                
                                    
x
+
TestMultiNode/serial/CopyFile (10.63s)

                                                
                                                
=== RUN   TestMultiNode/serial/CopyFile
multinode_test.go:184: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 status --output json --alsologtostderr
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 cp testdata/cp-test.txt multinode-890146:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 cp multinode-890146:/home/docker/cp-test.txt /tmp/TestMultiNodeserialCopyFile3892048771/001/cp-test_multinode-890146.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 cp multinode-890146:/home/docker/cp-test.txt multinode-890146-m02:/home/docker/cp-test_multinode-890146_multinode-890146-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146-m02 "sudo cat /home/docker/cp-test_multinode-890146_multinode-890146-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 cp multinode-890146:/home/docker/cp-test.txt multinode-890146-m03:/home/docker/cp-test_multinode-890146_multinode-890146-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146-m03 "sudo cat /home/docker/cp-test_multinode-890146_multinode-890146-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 cp testdata/cp-test.txt multinode-890146-m02:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 cp multinode-890146-m02:/home/docker/cp-test.txt /tmp/TestMultiNodeserialCopyFile3892048771/001/cp-test_multinode-890146-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 cp multinode-890146-m02:/home/docker/cp-test.txt multinode-890146:/home/docker/cp-test_multinode-890146-m02_multinode-890146.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146 "sudo cat /home/docker/cp-test_multinode-890146-m02_multinode-890146.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 cp multinode-890146-m02:/home/docker/cp-test.txt multinode-890146-m03:/home/docker/cp-test_multinode-890146-m02_multinode-890146-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146-m03 "sudo cat /home/docker/cp-test_multinode-890146-m02_multinode-890146-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 cp testdata/cp-test.txt multinode-890146-m03:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 cp multinode-890146-m03:/home/docker/cp-test.txt /tmp/TestMultiNodeserialCopyFile3892048771/001/cp-test_multinode-890146-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 cp multinode-890146-m03:/home/docker/cp-test.txt multinode-890146:/home/docker/cp-test_multinode-890146-m03_multinode-890146.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146 "sudo cat /home/docker/cp-test_multinode-890146-m03_multinode-890146.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 cp multinode-890146-m03:/home/docker/cp-test.txt multinode-890146-m02:/home/docker/cp-test_multinode-890146-m03_multinode-890146-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 ssh -n multinode-890146-m02 "sudo cat /home/docker/cp-test_multinode-890146-m03_multinode-890146-m02.txt"
--- PASS: TestMultiNode/serial/CopyFile (10.63s)

                                                
                                    
x
+
TestMultiNode/serial/StopNode (2.26s)

                                                
                                                
=== RUN   TestMultiNode/serial/StopNode
multinode_test.go:248: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 node stop m03
multinode_test.go:248: (dbg) Done: out/minikube-linux-arm64 -p multinode-890146 node stop m03: (1.203125981s)
multinode_test.go:254: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 status
multinode_test.go:254: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-890146 status: exit status 7 (509.786271ms)

                                                
                                                
-- stdout --
	multinode-890146
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	multinode-890146-m02
	type: Worker
	host: Running
	kubelet: Running
	
	multinode-890146-m03
	type: Worker
	host: Stopped
	kubelet: Stopped
	

                                                
                                                
-- /stdout --
multinode_test.go:261: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 status --alsologtostderr
multinode_test.go:261: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-890146 status --alsologtostderr: exit status 7 (542.296425ms)

                                                
                                                
-- stdout --
	multinode-890146
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	multinode-890146-m02
	type: Worker
	host: Running
	kubelet: Running
	
	multinode-890146-m03
	type: Worker
	host: Stopped
	kubelet: Stopped
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0916 11:09:05.997749 2186050 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:09:05.997881 2186050 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:09:05.997891 2186050 out.go:358] Setting ErrFile to fd 2...
	I0916 11:09:05.997896 2186050 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:09:05.998140 2186050 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 11:09:05.998337 2186050 out.go:352] Setting JSON to false
	I0916 11:09:05.998370 2186050 mustload.go:65] Loading cluster: multinode-890146
	I0916 11:09:05.998477 2186050 notify.go:220] Checking for updates...
	I0916 11:09:05.998882 2186050 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:09:05.998900 2186050 status.go:255] checking status of multinode-890146 ...
	I0916 11:09:06.000146 2186050 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:09:06.029290 2186050 status.go:330] multinode-890146 host status = "Running" (err=<nil>)
	I0916 11:09:06.029320 2186050 host.go:66] Checking if "multinode-890146" exists ...
	I0916 11:09:06.029650 2186050 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146
	I0916 11:09:06.050258 2186050 host.go:66] Checking if "multinode-890146" exists ...
	I0916 11:09:06.050962 2186050 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:09:06.051032 2186050 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146
	I0916 11:09:06.080391 2186050 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40717 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146/id_rsa Username:docker}
	I0916 11:09:06.176009 2186050 ssh_runner.go:195] Run: systemctl --version
	I0916 11:09:06.180860 2186050 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:09:06.193593 2186050 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0916 11:09:06.262859 2186050 info.go:266] docker info: {ID:EOU5:DNGX:XN6V:L2FZ:UXRM:5TWK:EVUR:KC2F:GT7Z:Y4O4:GB77:5PD3 Containers:5 ContainersRunning:4 ContainersPaused:0 ContainersStopped:1 Images:5 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:true BridgeNfIP6Tables:true Debug:false NFd:57 OomKillDisable:true NGoroutines:81 SystemTime:2024-09-16 11:09:06.251549736 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1069-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aar
ch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214835200 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-31-251 Labels:[] ExperimentalBuild:false ServerVersion:27.2.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c Expected:7f7fdf5fed64eb6a7caf99b3e12efcf9d60e311c} RuncCommit:{ID:v1.1.14-0-g2c9f560 Expected:v1.1.14-0-g2c9f560} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErro
rs:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.16.2] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.29.2]] Warnings:<nil>}}
	I0916 11:09:06.263462 2186050 kubeconfig.go:125] found "multinode-890146" server: "https://192.168.58.2:8443"
	I0916 11:09:06.263501 2186050 api_server.go:166] Checking apiserver status ...
	I0916 11:09:06.263548 2186050 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0916 11:09:06.274602 2186050 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/1444/cgroup
	I0916 11:09:06.283968 2186050 api_server.go:182] apiserver freezer: "13:freezer:/docker/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/kubepods/burstable/pode9853b0a0cf2a497fe455d2a5a3241a9/305b8895a34401a4626618470969b6ca3b591de13b0d36af0b2b2b23096ac46b"
	I0916 11:09:06.284056 2186050 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/d045dde36e30ad4efa2e6ed296e28b874c1e468dd38ce8c3b8f811b01134b6cb/kubepods/burstable/pode9853b0a0cf2a497fe455d2a5a3241a9/305b8895a34401a4626618470969b6ca3b591de13b0d36af0b2b2b23096ac46b/freezer.state
	I0916 11:09:06.293202 2186050 api_server.go:204] freezer state: "THAWED"
	I0916 11:09:06.293243 2186050 api_server.go:253] Checking apiserver healthz at https://192.168.58.2:8443/healthz ...
	I0916 11:09:06.300852 2186050 api_server.go:279] https://192.168.58.2:8443/healthz returned 200:
	ok
	I0916 11:09:06.300892 2186050 status.go:422] multinode-890146 apiserver status = Running (err=<nil>)
	I0916 11:09:06.300920 2186050 status.go:257] multinode-890146 status: &{Name:multinode-890146 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 11:09:06.300943 2186050 status.go:255] checking status of multinode-890146-m02 ...
	I0916 11:09:06.301296 2186050 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Status}}
	I0916 11:09:06.317901 2186050 status.go:330] multinode-890146-m02 host status = "Running" (err=<nil>)
	I0916 11:09:06.317971 2186050 host.go:66] Checking if "multinode-890146-m02" exists ...
	I0916 11:09:06.318387 2186050 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-890146-m02
	I0916 11:09:06.336112 2186050 host.go:66] Checking if "multinode-890146-m02" exists ...
	I0916 11:09:06.336428 2186050 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0916 11:09:06.336482 2186050 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-890146-m02
	I0916 11:09:06.352916 2186050 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:40722 SSHKeyPath:/home/jenkins/minikube-integration/19651-2057935/.minikube/machines/multinode-890146-m02/id_rsa Username:docker}
	I0916 11:09:06.448211 2186050 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0916 11:09:06.459949 2186050 status.go:257] multinode-890146-m02 status: &{Name:multinode-890146-m02 Host:Running Kubelet:Running APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}
	I0916 11:09:06.459987 2186050 status.go:255] checking status of multinode-890146-m03 ...
	I0916 11:09:06.460351 2186050 cli_runner.go:164] Run: docker container inspect multinode-890146-m03 --format={{.State.Status}}
	I0916 11:09:06.476411 2186050 status.go:330] multinode-890146-m03 host status = "Stopped" (err=<nil>)
	I0916 11:09:06.476436 2186050 status.go:343] host is not running, skipping remaining checks
	I0916 11:09:06.476443 2186050 status.go:257] multinode-890146-m03 status: &{Name:multinode-890146-m03 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}

                                                
                                                
** /stderr **
--- PASS: TestMultiNode/serial/StopNode (2.26s)

                                                
                                    
x
+
TestMultiNode/serial/RestartKeepsNodes (128.94s)

                                                
                                                
=== RUN   TestMultiNode/serial/RestartKeepsNodes
multinode_test.go:314: (dbg) Run:  out/minikube-linux-arm64 node list -p multinode-890146
multinode_test.go:321: (dbg) Run:  out/minikube-linux-arm64 stop -p multinode-890146
multinode_test.go:321: (dbg) Done: out/minikube-linux-arm64 stop -p multinode-890146: (25.021551132s)
multinode_test.go:326: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-890146 --wait=true -v=8 --alsologtostderr
E0916 11:10:30.736525 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
multinode_test.go:326: (dbg) Done: out/minikube-linux-arm64 start -p multinode-890146 --wait=true -v=8 --alsologtostderr: (1m43.785990258s)
multinode_test.go:331: (dbg) Run:  out/minikube-linux-arm64 node list -p multinode-890146
--- PASS: TestMultiNode/serial/RestartKeepsNodes (128.94s)

                                                
                                    
x
+
TestMultiNode/serial/StopMultiNode (24.21s)

                                                
                                                
=== RUN   TestMultiNode/serial/StopMultiNode
multinode_test.go:345: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 stop
multinode_test.go:345: (dbg) Done: out/minikube-linux-arm64 -p multinode-890146 stop: (24.032108789s)
multinode_test.go:351: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 status
multinode_test.go:351: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-890146 status: exit status 7 (96.483175ms)

                                                
                                                
-- stdout --
	multinode-890146
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	multinode-890146-m02
	type: Worker
	host: Stopped
	kubelet: Stopped
	

                                                
                                                
-- /stdout --
multinode_test.go:358: (dbg) Run:  out/minikube-linux-arm64 -p multinode-890146 status --alsologtostderr
multinode_test.go:358: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-890146 status --alsologtostderr: exit status 7 (83.45758ms)

                                                
                                                
-- stdout --
	multinode-890146
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	multinode-890146-m02
	type: Worker
	host: Stopped
	kubelet: Stopped
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0916 11:12:00.904649 2195208 out.go:345] Setting OutFile to fd 1 ...
	I0916 11:12:00.904846 2195208 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:12:00.904873 2195208 out.go:358] Setting ErrFile to fd 2...
	I0916 11:12:00.904892 2195208 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0916 11:12:00.905150 2195208 root.go:338] Updating PATH: /home/jenkins/minikube-integration/19651-2057935/.minikube/bin
	I0916 11:12:00.905378 2195208 out.go:352] Setting JSON to false
	I0916 11:12:00.905441 2195208 mustload.go:65] Loading cluster: multinode-890146
	I0916 11:12:00.905532 2195208 notify.go:220] Checking for updates...
	I0916 11:12:00.905934 2195208 config.go:182] Loaded profile config "multinode-890146": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.31.1
	I0916 11:12:00.905981 2195208 status.go:255] checking status of multinode-890146 ...
	I0916 11:12:00.906882 2195208 cli_runner.go:164] Run: docker container inspect multinode-890146 --format={{.State.Status}}
	I0916 11:12:00.923496 2195208 status.go:330] multinode-890146 host status = "Stopped" (err=<nil>)
	I0916 11:12:00.923522 2195208 status.go:343] host is not running, skipping remaining checks
	I0916 11:12:00.923530 2195208 status.go:257] multinode-890146 status: &{Name:multinode-890146 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0916 11:12:00.923561 2195208 status.go:255] checking status of multinode-890146-m02 ...
	I0916 11:12:00.923874 2195208 cli_runner.go:164] Run: docker container inspect multinode-890146-m02 --format={{.State.Status}}
	I0916 11:12:00.940418 2195208 status.go:330] multinode-890146-m02 host status = "Stopped" (err=<nil>)
	I0916 11:12:00.940442 2195208 status.go:343] host is not running, skipping remaining checks
	I0916 11:12:00.940450 2195208 status.go:257] multinode-890146-m02 status: &{Name:multinode-890146-m02 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}

                                                
                                                
** /stderr **
--- PASS: TestMultiNode/serial/StopMultiNode (24.21s)

                                                
                                    
x
+
TestMultiNode/serial/ValidateNameConflict (31.59s)

                                                
                                                
=== RUN   TestMultiNode/serial/ValidateNameConflict
multinode_test.go:455: (dbg) Run:  out/minikube-linux-arm64 node list -p multinode-890146
multinode_test.go:464: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-890146-m02 --driver=docker  --container-runtime=containerd
multinode_test.go:464: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p multinode-890146-m02 --driver=docker  --container-runtime=containerd: exit status 14 (74.712984ms)

                                                
                                                
-- stdout --
	* [multinode-890146-m02] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=19651
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	! Profile name 'multinode-890146-m02' is duplicated with machine name 'multinode-890146-m02' in profile 'multinode-890146'
	X Exiting due to MK_USAGE: Profile name should be unique

                                                
                                                
** /stderr **
multinode_test.go:472: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-890146-m03 --driver=docker  --container-runtime=containerd
multinode_test.go:472: (dbg) Done: out/minikube-linux-arm64 start -p multinode-890146-m03 --driver=docker  --container-runtime=containerd: (29.136134839s)
multinode_test.go:479: (dbg) Run:  out/minikube-linux-arm64 node add -p multinode-890146
multinode_test.go:479: (dbg) Non-zero exit: out/minikube-linux-arm64 node add -p multinode-890146: exit status 80 (358.30492ms)

                                                
                                                
-- stdout --
	* Adding node m03 to cluster multinode-890146 as [worker]
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	X Exiting due to GUEST_NODE_ADD: failed to add node: Node multinode-890146-m03 already exists in multinode-890146-m03 profile
	* 
	╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│    * Please also attach the following file to the GitHub issue:                             │
	│    * - /tmp/minikube_node_040ea7097fd6ed71e65be9a474587f81f0ccd21d_6.log                    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯

                                                
                                                
** /stderr **
multinode_test.go:484: (dbg) Run:  out/minikube-linux-arm64 delete -p multinode-890146-m03
multinode_test.go:484: (dbg) Done: out/minikube-linux-arm64 delete -p multinode-890146-m03: (1.970405474s)
--- PASS: TestMultiNode/serial/ValidateNameConflict (31.59s)

                                                
                                    
x
+
TestScheduledStopUnix (104.98s)

                                                
                                                
=== RUN   TestScheduledStopUnix
scheduled_stop_test.go:128: (dbg) Run:  out/minikube-linux-arm64 start -p scheduled-stop-627162 --memory=2048 --driver=docker  --container-runtime=containerd
E0916 11:14:07.672562 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/functional-911502/client.crt: no such file or directory" logger="UnhandledError"
scheduled_stop_test.go:128: (dbg) Done: out/minikube-linux-arm64 start -p scheduled-stop-627162 --memory=2048 --driver=docker  --container-runtime=containerd: (28.407274009s)
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-627162 --schedule 5m
scheduled_stop_test.go:191: (dbg) Run:  out/minikube-linux-arm64 status --format={{.TimeToStop}} -p scheduled-stop-627162 -n scheduled-stop-627162
scheduled_stop_test.go:169: signal error was:  <nil>
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-627162 --schedule 15s
scheduled_stop_test.go:169: signal error was:  os: process already finished
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-627162 --cancel-scheduled
scheduled_stop_test.go:176: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p scheduled-stop-627162 -n scheduled-stop-627162
scheduled_stop_test.go:205: (dbg) Run:  out/minikube-linux-arm64 status -p scheduled-stop-627162
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-627162 --schedule 15s
scheduled_stop_test.go:169: signal error was:  os: process already finished
scheduled_stop_test.go:205: (dbg) Run:  out/minikube-linux-arm64 status -p scheduled-stop-627162
scheduled_stop_test.go:205: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p scheduled-stop-627162: exit status 7 (66.929307ms)

                                                
                                                
-- stdout --
	scheduled-stop-627162
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	

                                                
                                                
-- /stdout --
scheduled_stop_test.go:176: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p scheduled-stop-627162 -n scheduled-stop-627162
scheduled_stop_test.go:176: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Host}} -p scheduled-stop-627162 -n scheduled-stop-627162: exit status 7 (66.885098ms)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
scheduled_stop_test.go:176: status error: exit status 7 (may be ok)
helpers_test.go:175: Cleaning up "scheduled-stop-627162" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p scheduled-stop-627162
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p scheduled-stop-627162: (5.016547496s)
--- PASS: TestScheduledStopUnix (104.98s)

                                                
                                    
x
+
TestInsufficientStorage (10.65s)

                                                
                                                
=== RUN   TestInsufficientStorage
status_test.go:50: (dbg) Run:  out/minikube-linux-arm64 start -p insufficient-storage-024842 --memory=2048 --output=json --wait=true --driver=docker  --container-runtime=containerd
status_test.go:50: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p insufficient-storage-024842 --memory=2048 --output=json --wait=true --driver=docker  --container-runtime=containerd: exit status 26 (8.103070663s)

                                                
                                                
-- stdout --
	{"specversion":"1.0","id":"e9504f14-c272-4385-82ca-efb4b68485d4","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"0","message":"[insufficient-storage-024842] minikube v1.34.0 on Ubuntu 20.04 (arm64)","name":"Initial Minikube Setup","totalsteps":"19"}}
	{"specversion":"1.0","id":"2b87f22b-a032-4537-b80d-0c99c7753180","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_LOCATION=19651"}}
	{"specversion":"1.0","id":"9cb647a4-356d-4755-880c-f8a3e28b3b10","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true"}}
	{"specversion":"1.0","id":"54756158-093f-43ee-b8f1-e0409f768680","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig"}}
	{"specversion":"1.0","id":"8c9b4440-bc4c-447c-9c74-c41ce8fb440e","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube"}}
	{"specversion":"1.0","id":"7425ebbd-7f67-4c83-b4f0-277e50f13980","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_BIN=out/minikube-linux-arm64"}}
	{"specversion":"1.0","id":"fff14bfb-0ed7-49e4-b2a1-011a25e6ce87","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_FORCE_SYSTEMD="}}
	{"specversion":"1.0","id":"1ca51eb7-124b-4fad-b502-463b2eaefb36","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_TEST_STORAGE_CAPACITY=100"}}
	{"specversion":"1.0","id":"56c7ee6c-28c2-467c-bfb8-064a8dd179be","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_TEST_AVAILABLE_STORAGE=19"}}
	{"specversion":"1.0","id":"9cf15db0-106a-42d5-819c-243d200eb430","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"1","message":"Using the docker driver based on user configuration","name":"Selecting Driver","totalsteps":"19"}}
	{"specversion":"1.0","id":"7b204548-fb32-4674-905a-cc8740425f44","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"Using Docker driver with root privileges"}}
	{"specversion":"1.0","id":"3cf6ad9e-ce2c-47d0-b84c-ba2a15248ee4","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"3","message":"Starting \"insufficient-storage-024842\" primary control-plane node in \"insufficient-storage-024842\" cluster","name":"Starting Node","totalsteps":"19"}}
	{"specversion":"1.0","id":"c4e1f455-a9c1-4d88-8cfc-f72df06d16dd","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"5","message":"Pulling base image v0.0.45-1726358845-19644 ...","name":"Pulling Base Image","totalsteps":"19"}}
	{"specversion":"1.0","id":"bb187786-de16-4bd6-812c-bdfba36574d4","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"8","message":"Creating docker container (CPUs=2, Memory=2048MB) ...","name":"Creating Container","totalsteps":"19"}}
	{"specversion":"1.0","id":"13c7e3dc-68fa-4dd9-9932-2e980fd01a84","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.error","datacontenttype":"application/json","data":{"advice":"Try one or more of the following to free up space on the device:\n\t\n\t\t\t1. Run \"docker system prune\" to remove unused Docker data (optionally with \"-a\")\n\t\t\t2. Increase the storage allocated to Docker for Desktop by clicking on:\n\t\t\t\tDocker icon \u003e Preferences \u003e Resources \u003e Disk Image Size\n\t\t\t3. Run \"minikube ssh -- docker system prune\" if using the Docker container runtime","exitcode":"26","issues":"https://github.com/kubernetes/minikube/issues/9024","message":"Docker is out of disk space! (/var is at 100% of capacity). You can pass '--force' to skip this check.","name":"RSRC_DOCKER_STORAGE","url":""}}

                                                
                                                
-- /stdout --
status_test.go:76: (dbg) Run:  out/minikube-linux-arm64 status -p insufficient-storage-024842 --output=json --layout=cluster
status_test.go:76: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p insufficient-storage-024842 --output=json --layout=cluster: exit status 7 (319.354837ms)

                                                
                                                
-- stdout --
	{"Name":"insufficient-storage-024842","StatusCode":507,"StatusName":"InsufficientStorage","StatusDetail":"/var is almost out of disk space","Step":"Creating Container","StepDetail":"Creating docker container (CPUs=2, Memory=2048MB) ...","BinaryVersion":"v1.34.0","Components":{"kubeconfig":{"Name":"kubeconfig","StatusCode":500,"StatusName":"Error"}},"Nodes":[{"Name":"insufficient-storage-024842","StatusCode":507,"StatusName":"InsufficientStorage","Components":{"apiserver":{"Name":"apiserver","StatusCode":405,"StatusName":"Stopped"},"kubelet":{"Name":"kubelet","StatusCode":405,"StatusName":"Stopped"}}}]}

                                                
                                                
-- /stdout --
** stderr ** 
	E0916 11:15:42.357238 2210363 status.go:417] kubeconfig endpoint: get endpoint: "insufficient-storage-024842" does not appear in /home/jenkins/minikube-integration/19651-2057935/kubeconfig

                                                
                                                
** /stderr **
status_test.go:76: (dbg) Run:  out/minikube-linux-arm64 status -p insufficient-storage-024842 --output=json --layout=cluster
status_test.go:76: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p insufficient-storage-024842 --output=json --layout=cluster: exit status 7 (299.999425ms)

                                                
                                                
-- stdout --
	{"Name":"insufficient-storage-024842","StatusCode":507,"StatusName":"InsufficientStorage","StatusDetail":"/var is almost out of disk space","BinaryVersion":"v1.34.0","Components":{"kubeconfig":{"Name":"kubeconfig","StatusCode":500,"StatusName":"Error"}},"Nodes":[{"Name":"insufficient-storage-024842","StatusCode":507,"StatusName":"InsufficientStorage","Components":{"apiserver":{"Name":"apiserver","StatusCode":405,"StatusName":"Stopped"},"kubelet":{"Name":"kubelet","StatusCode":405,"StatusName":"Stopped"}}}]}

                                                
                                                
-- /stdout --
** stderr ** 
	E0916 11:15:42.660977 2210425 status.go:417] kubeconfig endpoint: get endpoint: "insufficient-storage-024842" does not appear in /home/jenkins/minikube-integration/19651-2057935/kubeconfig
	E0916 11:15:42.672508 2210425 status.go:560] unable to read event log: stat: stat /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/insufficient-storage-024842/events.json: no such file or directory

                                                
                                                
** /stderr **
helpers_test.go:175: Cleaning up "insufficient-storage-024842" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p insufficient-storage-024842
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p insufficient-storage-024842: (1.924971169s)
--- PASS: TestInsufficientStorage (10.65s)

                                                
                                    
x
+
TestRunningBinaryUpgrade (82.26s)

                                                
                                                
=== RUN   TestRunningBinaryUpgrade
=== PAUSE TestRunningBinaryUpgrade

                                                
                                                

                                                
                                                
=== CONT  TestRunningBinaryUpgrade
version_upgrade_test.go:120: (dbg) Run:  /tmp/minikube-v1.26.0.2330603002 start -p running-upgrade-718358 --memory=2200 --vm-driver=docker  --container-runtime=containerd
E0916 11:21:37.029121 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
version_upgrade_test.go:120: (dbg) Done: /tmp/minikube-v1.26.0.2330603002 start -p running-upgrade-718358 --memory=2200 --vm-driver=docker  --container-runtime=containerd: (45.214582155s)
version_upgrade_test.go:130: (dbg) Run:  out/minikube-linux-arm64 start -p running-upgrade-718358 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:130: (dbg) Done: out/minikube-linux-arm64 start -p running-upgrade-718358 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (33.984170589s)
helpers_test.go:175: Cleaning up "running-upgrade-718358" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p running-upgrade-718358
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p running-upgrade-718358: (2.3398005s)
--- PASS: TestRunningBinaryUpgrade (82.26s)

                                                
                                    
x
+
TestMissingContainerUpgrade (143.3s)

                                                
                                                
=== RUN   TestMissingContainerUpgrade
=== PAUSE TestMissingContainerUpgrade

                                                
                                                

                                                
                                                
=== CONT  TestMissingContainerUpgrade
version_upgrade_test.go:309: (dbg) Run:  /tmp/minikube-v1.26.0.122446980 start -p missing-upgrade-179058 --memory=2200 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:309: (dbg) Done: /tmp/minikube-v1.26.0.122446980 start -p missing-upgrade-179058 --memory=2200 --driver=docker  --container-runtime=containerd: (43.788906949s)
version_upgrade_test.go:318: (dbg) Run:  docker stop missing-upgrade-179058
version_upgrade_test.go:318: (dbg) Done: docker stop missing-upgrade-179058: (12.717127283s)
version_upgrade_test.go:323: (dbg) Run:  docker rm missing-upgrade-179058
version_upgrade_test.go:329: (dbg) Run:  out/minikube-linux-arm64 start -p missing-upgrade-179058 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
E0916 11:18:33.955967 2063326 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/19651-2057935/.minikube/profiles/addons-451841/client.crt: no such file or directory" logger="UnhandledError"
version_upgrade_test.go:329: (dbg) Done: out/minikube-linux-arm64 start -p missing-upgrade-179058 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (1m23.869312174s)
helpers_test.go:175: Cleaning up "missing-upgrade-179058" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p missing-upgrade-179058
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p missing-upgrade-179058: (2.27865068s)
--- PASS: TestMissingContainerUpgrade (143.30s)

                                                
                                    
x
+
TestPause/serial/Start (103.54s)

                                                
                                                
=== RUN   TestPause/serial/Start
pause_test.go:80: (dbg) Run:  out/minikube-linux-arm64 start -p pause-486162 --memory=2048 --install-addons=false --wait=all --driver=docker  --container-runtime=containerd
pause_test.go:80: (dbg) Done: out/minikube-linux-arm64 start -p pause-486162 --memory=2048 --install-addons=false --wait=all --driver=docker  --container-runtime=containerd: (1m43.541878379s)
--- PASS: TestPause/serial/Start (103.54s)

                                                
                                    
x
+
TestNoKubernetes/serial/StartNoK8sWithVersion (0.09s)

                                                
                                                
=== RUN   TestNoKubernetes/serial/StartNoK8sWithVersion
no_kubernetes_test.go:83: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-960900 --no-kubernetes --kubernetes-version=1.20 --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:83: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p NoKubernetes-960900 --no-kubernetes --kubernetes-version=1.20 --driver=docker  --container-runtime=containerd: exit status 14 (93.522585ms)

                                                
                                                
-- stdout --
	* [NoKubernetes-960900] minikube v1.34.0 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=19651
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/19651-2057935/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/19651-2057935/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	X Exiting due to MK_USAGE: cannot specify --kubernetes-version with --no-kubernetes,
	to unset a global config run:
	
	$ minikube config unset kubernetes-version

                                                
                                                
** /stderr **
--- PASS: TestNoKubernetes/serial/StartNoK8sWithVersion (0.09s)

                                                
                                    
x
+
TestNoKubernetes/serial/StartWithK8s (44.67s)

                                                
                                                
=== RUN   TestNoKubernetes/serial/StartWithK8s
no_kubernetes_test.go:95: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-960900 --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:95: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-960900 --driver=docker  --container-runtime=containerd: (44.216511034s)
no_kubernetes_test.go:200: (dbg) Run:  out/minikube-linux-arm64 -p NoKubernetes-960900 status -o json
--- PASS: TestNoKubernetes/serial/StartWithK8s (44.67s)

                                                
                                    
x
+
TestNoKubernetes/serial/StartWithStopK8s (8.97s)

                                                
                                                
=== RUN   TestNoKubernetes/serial/StartWithStopK8s
no_kubernetes_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-960900 --no-kubernetes --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-960900 --no-kubernetes --driver=docker  --container-runtime=containerd: (6.755846948s)
no_kubernetes_test.go:200: (dbg) Run:  out/minikube-linux-arm64 -p NoKubernetes-960900 status -o json
no_kubernetes_test.go:200: (dbg) Non-zero exit: out/minikube-linux-arm64 -p NoKubernetes-960900 status -o json: exit status 2 (313.736037ms)

                                                
                                                
-- stdout --
	{"Name":"NoKubernetes-960900","Host":"Running","Kubelet":"Stopped","APIServer":"Stopped","Kubeconfig":"Configured","Worker":false}

                                                
                                                
-- /stdout --
no_kubernetes_test.go:124: (dbg) Run:  out/minikube-linux-arm64 delete -p NoKubernetes-960900
no_kubernetes_test.go:124: (dbg) Done: out/minikube-linux-arm64 delete -p NoKubernetes-960900: (1.904395705s)
--- PASS: TestNoKubernetes/serial/StartWithStopK8s (8.97s)

                                                
                                    
x
+
TestNoKubernetes/serial/Start (7.17s)

                                                
                                                
=== RUN   TestNoKubernetes/serial/Start
no_kubernetes_test.go:136: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-960900 --no-kubernetes --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:136: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-960900 --no-kubernetes --driver=docker  --container-runtime=containerd: (7.170606998s)
--- PASS: TestNoKubernetes/serial/Start (7.17s)

                                                
                                    
x
+
TestNoKubernetes/serial/VerifyK8sNotRunning (0.27s)

                                                
                                                
=== RUN   TestNoKubernetes/serial/VerifyK8sNotRunning
no_kubernetes_test.go:147: (dbg) Run:  out/minikube-linux-arm64 ssh -p NoKubernetes-960900 "sudo systemctl is-active --quiet service kubelet"
no_kubernetes_test.go:147: (dbg) Non-zero exit: out/minikube-linux-arm64 ssh -p NoKubernetes-960900 "sudo systemctl is-active --quiet service kubelet": exit status 1 (267.72946ms)

                                                
                                                
** stderr ** 
	ssh: Process exited with status 3

                                                
                                                
** /stderr **
--- PASS: TestNoKubernetes/serial/VerifyK8sNotRunning (0.27s)

                                                
                                    
x
+
TestNoKubernetes/serial/ProfileList (0.98s)

                                                
                                                
=== RUN   TestNoKubernetes/serial/ProfileList
no_kubernetes_test.go:169: (dbg) Run:  out/minikube-linux-arm64 profile list
no_kubernetes_test.go:179: (dbg) Run:  out/minikube-linux-arm64 profile list --output=json
--- PASS: TestNoKubernetes/serial/ProfileList (0.98s)

                                                
                                    
x
+
TestNoKubernetes/serial/Stop (1.22s)

                                                
                                                
=== RUN   TestNoKubernetes/serial/Stop
no_kubernetes_test.go:158: (dbg) Run:  out/minikube-linux-arm64 stop -p NoKubernetes-960900
no_kubernetes_test.go:158: (dbg) Done: out/minikube-linux-arm64 stop -p NoKubernetes-960900: (1.219388737s)
--- PASS: TestNoKubernetes/serial/Stop (1.22s)

                                                
                                    
x
+
TestNoKubernetes/serial/StartNoArgs (7.21s)

                                                
                                                
=== RUN   TestNoKubernetes/serial/StartNoArgs
no_kubernetes_test.go:191: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-960900 --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:191: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-960900 --driver=docker  --container-runtime=containerd: (7.208862952s)
--- PASS: TestNoKubernetes/serial/StartNoArgs (7.21s)

                                                
                                    
x
+
TestNoKubernetes/serial/VerifyK8sNotRunningSecond (0.31s)

                                                
                                                
=== RUN   TestNoKubernetes/serial/VerifyK8sNotRunningSecond
no_kubernetes_test.go:147: (dbg) Run:  out/minikube-linux-arm64 ssh -p NoKubernetes-960900 "sudo systemctl is-active --quiet service kubelet"
no_kubernetes_test.go:147: (dbg) Non-zero exit: out/minikube-linux-arm64 ssh -p NoKubernetes-960900 "sudo systemctl is-active --quiet service kubelet": exit status 1 (308.980901ms)

                                                
                                                
** stderr ** 
	ssh: Process exited with status 3

                                                
                                                
** /stderr **
--- PASS: TestNoKubernetes/serial/VerifyK8sNotRunningSecond (0.31s)

                                                
                                    
x
+
TestPause/serial/SecondStartNoReconfiguration (8.21s)

                                                
                                                
=== RUN   TestPause/serial/SecondStartNoReconfiguration
pause_test.go:92: (dbg) Run:  out/minikube-linux-arm64 start -p pause-486162 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
pause_test.go:92: (dbg) Done: out/minikube-linux-arm64 start -p pause-486162 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (8.191445505s)
--- PASS: TestPause/serial/SecondStartNoReconfiguration (8.21s)

                                                
                                    
x
+
TestPause/serial/Pause (0.85s)

                                                
                                                
=== RUN   TestPause/serial/Pause
pause_test.go:110: (dbg) Run:  out/minikube-linux-arm64 pause -p pause-486162 --alsologtostderr -v=5
--- PASS: TestPause/serial/Pause (0.85s)

                                                
                                    
x
+
TestPause/serial/VerifyStatus (0.44s)

                                                
                                                
=== RUN   TestPause/serial/VerifyStatus
status_test.go:76: (dbg) Run:  out/minikube-linux-arm64 status -p pause-486162 --output=json --layout=cluster
status_test.go:76: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p pause-486162 --output=json --layout=cluster: exit status 2 (444.733225ms)

                                                
                                                
-- stdout --
	{"Name":"pause-486162","StatusCode":418,"StatusName":"Paused","Step":"Done","StepDetail":"* Paused 7 containers in: kube-system, kubernetes-dashboard, storage-gluster, istio-operator","BinaryVersion":"v1.34.0","Components":{"kubeconfig":{"Name":"kubeconfig","StatusCode":200,"StatusName":"OK"}},"Nodes":[{"Name":"pause-486162","StatusCode":200,"StatusName":"OK","Components":{"apiserver":{"Name":"apiserver","StatusCode":418,"StatusName":"Paused"},"kubelet":{"Name":"kubelet","StatusCode":405,"StatusName":"Stopped"}}}]}

                                                
                                                
-- /stdout --
--- PASS: TestPause/serial/VerifyStatus (0.44s)

                                                
                                    
x
+
TestPause/serial/Unpause (1.06s)

                                                
                                                
=== RUN   TestPause/serial/Unpause
pause_test.go:121: (dbg) Run:  out/minikube-linux-arm64 unpause -p pause-486162 --alsologtostderr -v=5
pause_test.go:121: (dbg) Done: out/minikube-linux-arm64 unpause -p pause-486162 --alsologtostderr -v=5: (1.059924209s)
--- PASS: TestPause/serial/Unpause (1.06s)

                                                
                                    
x
+
TestPause/serial/PauseAgain (1.07s)

                                                
                                                
=== RUN   TestPause/serial/PauseAgain
pause_test.go:110: (dbg) Run:  out/minikube-linux-arm64 pause -p pause-486162 --alsologtostderr -v=5
pause_test.go:110: (dbg) Done: out/minikube-linux-arm64 pause -p pause-486162 --alsologtostderr -v=5: (1.073090386s)
--- PASS: TestPause/serial/PauseAgain (1.07s)

                                                
                                    
x
+
TestPause/serial/DeletePaused (2.81s)

                                                
                                                
=== RUN   TestPause/serial/DeletePaused
pause_test.go:132: (dbg) Run:  out/minikube-linux-arm64 delete -p pause-486162 --alsologtostderr -v=5
pause_test.go:132: (dbg) Done: out/minikube-linux-arm64 delete -p pause-486162 --alsologtostderr -v=5: (2.811481253s)
--- PASS: TestPause/serial/DeletePaused (2.81s)

                                                
                                    
x
+
TestPause/serial/VerifyDeletedResources (3.16s)

                                                
                                                
=== RUN   TestPause/serial/VerifyDeletedResources
pause_test.go:142: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
pause_test.go:142: (dbg) Done: out/minikube-linux-arm64 profile list --output json: (3.102818387s)
pause_test.go:168: (dbg) Run:  docker ps -a
pause_test.go:173: (dbg) Run:  docker volume inspect pause-486162
pause_test.go:173: (dbg) Non-zero exit: docker volume inspect pause-486162: exit status 1 (17.325777ms)

                                                
                                                
-- stdout --
	[]

                                                
                                                
-- /stdout --
** stderr ** 
	Error response from daemon: get pause-486162: no such volume

                                                
                                                
** /stderr **
pause_test.go:178: (dbg) Run:  docker network ls
--- PASS: TestPause/serial/VerifyDeletedResources (3.16s)

                                                
                                    
x
+
TestStoppedBinaryUpgrade/Setup (0.61s)

                                                
                                                
=== RUN   TestStoppedBinaryUpgrade/Setup
--- PASS: TestStoppedBinaryUpgrade/Setup (0.61s)

                                                
                                    
x
+
TestStoppedBinaryUpgrade/Upgrade (106.99s)

                                                
                                                
=== RUN   TestStoppedBinaryUpgrade/Upgrade
version_upgrade_test.go:183: (dbg) Run:  /tmp/minikube-v1.26.0.1390005059 start -p stopped-upgrade-951684 --memory=2200 --vm-driver=docker  --container-runtime=containerd
version_upgrade_test.go:183: (dbg) Done: /tmp/minikube-v1.26.0.1390005059 start -p stopped-upgrade-951684 --memory=2200 --vm-driver=docker  --container-runtime=containerd: (45.965687983s)
version_upgrade_test.go:192: (dbg) Run:  /tmp/minikube-v1.26.0.1390005059 -p stopped-upgrade-951684 stop
version_upgrade_test.go:192: (dbg) Done: /tmp/minikube-v1.26.0.1390005059 -p stopped-upgrade-951684 stop: (19.949205983s)
version_upgrade_test.go:198: (dbg) Run:  out/minikube-linux-arm64 start -p stopped-upgrade-951684 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:198: (dbg) Done: out/minikube-linux-arm64 start -p stopped-upgrade-951684 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (41.078330474s)
--- PASS: TestStoppedBinaryUpgrade/Upgrade (106.99s)

                                                
                                    
x
+
TestStoppedBinaryUpgrade/MinikubeLogs (1.18s)

                                                
                                                
=== RUN   TestStoppedBinaryUpgrade/MinikubeLogs
version_upgrade_test.go:206: (dbg) Run:  out/minikube-linux-arm64 logs -p stopped-upgrade-951684
version_upgrade_test.go:206: (dbg) Done: out/minikube-linux-arm64 logs -p stopped-upgrade-951684: (1.179501539s)
--- PASS: TestStoppedBinaryUpgrade/MinikubeLogs (1.18s)

                                                
                                    

Test skip (25/229)

x
+
TestDownloadOnly/v1.20.0/cached-images (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/cached-images
aaa_download_only_test.go:129: Preload exists, images won't be cached
--- SKIP: TestDownloadOnly/v1.20.0/cached-images (0.00s)

                                                
                                    
x
+
TestDownloadOnly/v1.20.0/binaries (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/binaries
aaa_download_only_test.go:151: Preload exists, binaries are present within.
--- SKIP: TestDownloadOnly/v1.20.0/binaries (0.00s)

                                                
                                    
x
+
TestDownloadOnly/v1.20.0/kubectl (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/kubectl
aaa_download_only_test.go:167: Test for darwin and windows
--- SKIP: TestDownloadOnly/v1.20.0/kubectl (0.00s)

                                                
                                    
x
+
TestDownloadOnly/v1.31.1/cached-images (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.31.1/cached-images
aaa_download_only_test.go:129: Preload exists, images won't be cached
--- SKIP: TestDownloadOnly/v1.31.1/cached-images (0.00s)

                                                
                                    
x
+
TestDownloadOnly/v1.31.1/binaries (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.31.1/binaries
aaa_download_only_test.go:151: Preload exists, binaries are present within.
--- SKIP: TestDownloadOnly/v1.31.1/binaries (0.00s)

                                                
                                    
x
+
TestDownloadOnly/v1.31.1/kubectl (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.31.1/kubectl
aaa_download_only_test.go:167: Test for darwin and windows
--- SKIP: TestDownloadOnly/v1.31.1/kubectl (0.00s)

                                                
                                    
x
+
TestDownloadOnlyKic (0.59s)

                                                
                                                
=== RUN   TestDownloadOnlyKic
aaa_download_only_test.go:232: (dbg) Run:  out/minikube-linux-arm64 start --download-only -p download-docker-956530 --alsologtostderr --driver=docker  --container-runtime=containerd
aaa_download_only_test.go:244: Skip for arm64 platform. See https://github.com/kubernetes/minikube/issues/10144
helpers_test.go:175: Cleaning up "download-docker-956530" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p download-docker-956530
--- SKIP: TestDownloadOnlyKic (0.59s)

                                                
                                    
x
+
TestOffline (0s)

                                                
                                                
=== RUN   TestOffline
=== PAUSE TestOffline

                                                
                                                

                                                
                                                
=== CONT  TestOffline
aab_offline_test.go:35: skipping TestOffline - only docker runtime supported on arm64. See https://github.com/kubernetes/minikube/issues/10144
--- SKIP: TestOffline (0.00s)

                                                
                                    
x
+
TestAddons/parallel/HelmTiller (0s)

                                                
                                                
=== RUN   TestAddons/parallel/HelmTiller
=== PAUSE TestAddons/parallel/HelmTiller

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/HelmTiller
addons_test.go:446: skip Helm test on arm64
--- SKIP: TestAddons/parallel/HelmTiller (0.00s)

                                                
                                    
x
+
TestAddons/parallel/Olm (0s)

                                                
                                                
=== RUN   TestAddons/parallel/Olm
=== PAUSE TestAddons/parallel/Olm

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Olm
addons_test.go:500: Skipping OLM addon test until https://github.com/operator-framework/operator-lifecycle-manager/issues/2534 is resolved
--- SKIP: TestAddons/parallel/Olm (0.00s)

                                                
                                    
x
+
TestDockerFlags (0s)

                                                
                                                
=== RUN   TestDockerFlags
docker_test.go:41: skipping: only runs with docker container runtime, currently testing containerd
--- SKIP: TestDockerFlags (0.00s)

                                                
                                    
x
+
TestKVMDriverInstallOrUpdate (0s)

                                                
                                                
=== RUN   TestKVMDriverInstallOrUpdate
driver_install_or_update_test.go:45: Skip if arm64. See https://github.com/kubernetes/minikube/issues/10144
--- SKIP: TestKVMDriverInstallOrUpdate (0.00s)

                                                
                                    
x
+
TestHyperKitDriverInstallOrUpdate (0s)

                                                
                                                
=== RUN   TestHyperKitDriverInstallOrUpdate
driver_install_or_update_test.go:105: Skip if not darwin.
--- SKIP: TestHyperKitDriverInstallOrUpdate (0.00s)

                                                
                                    
x
+
TestHyperkitDriverSkipUpgrade (0s)

                                                
                                                
=== RUN   TestHyperkitDriverSkipUpgrade
driver_install_or_update_test.go:169: Skip if not darwin.
--- SKIP: TestHyperkitDriverSkipUpgrade (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/MySQL (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/MySQL
=== PAUSE TestFunctional/parallel/MySQL

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/MySQL
functional_test.go:1787: arm64 is not supported by mysql. Skip the test. See https://github.com/kubernetes/minikube/issues/10144
--- SKIP: TestFunctional/parallel/MySQL (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/DockerEnv (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/DockerEnv
=== PAUSE TestFunctional/parallel/DockerEnv

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/DockerEnv
functional_test.go:463: only validate docker env with docker container runtime, currently testing containerd
--- SKIP: TestFunctional/parallel/DockerEnv (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/PodmanEnv (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/PodmanEnv
=== PAUSE TestFunctional/parallel/PodmanEnv

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/PodmanEnv
functional_test.go:550: only validate podman env with docker container runtime, currently testing containerd
--- SKIP: TestFunctional/parallel/PodmanEnv (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS (0.00s)

                                                
                                    
x
+
TestGvisorAddon (0s)

                                                
                                                
=== RUN   TestGvisorAddon
gvisor_addon_test.go:34: skipping test because --gvisor=false
--- SKIP: TestGvisorAddon (0.00s)

                                                
                                    
x
+
TestImageBuild (0s)

                                                
                                                
=== RUN   TestImageBuild
image_test.go:33: 
--- SKIP: TestImageBuild (0.00s)

                                                
                                    
x
+
TestChangeNoneUser (0s)

                                                
                                                
=== RUN   TestChangeNoneUser
none_test.go:38: Test requires none driver and SUDO_USER env to not be empty
--- SKIP: TestChangeNoneUser (0.00s)

                                                
                                    
x
+
TestScheduledStopWindows (0s)

                                                
                                                
=== RUN   TestScheduledStopWindows
scheduled_stop_test.go:42: test only runs on windows
--- SKIP: TestScheduledStopWindows (0.00s)

                                                
                                    
x
+
TestSkaffold (0s)

                                                
                                                
=== RUN   TestSkaffold
skaffold_test.go:45: skaffold requires docker-env, currently testing containerd container runtime
--- SKIP: TestSkaffold (0.00s)

                                                
                                    
Copied to clipboard